Example No. 1
File: net4.cs Project: buguen/wsnpy
 public static void tst_find_schedule()
 {
     int[] fv = new int[] {-1, 0, 1, 1, 1, 0, 5, 6, 7, 8};
     double[] ps = new double[fv.Length];
     for (int i = 0; i < ps.Length; i++)
     {
         if (i < 5)
         {
             ps[i] = 0.4;
         }
         else
         {
             ps[i] = 0.8;
         }
     }
     LossTree t = new LossTree(fv, ps, 8);
     G.VB = true;
     t.find_schedule(5, 2);
     foreach (Node n in t.nodes)
     {
         Console.WriteLine("Node {0}: q = {1}", n.ID, n.q);
     }
 }
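
The fv arrays in these tests encode the tree as a parent vector: fv[i] is the index of node i's parent and -1 marks the root, which appears to be the convention shared by LossTree and RandomTree.parents. A minimal standalone sketch (the class and method names below are made up, not part of wsnpy) that lists each node's children from such a vector:

    using System;
    using System.Collections.Generic;

    static class ParentVectorSketch
    {
        static void Main()
        {
            // Same topology as tst_find_schedule above.
            int[] fv = new int[] { -1, 0, 1, 1, 1, 0, 5, 6, 7, 8 };
            // Group node indices by their parent; the root shows up under -1.
            var children = new Dictionary<int, List<int>>();
            for (int i = 0; i < fv.Length; i++)
            {
                if (!children.ContainsKey(fv[i]))
                    children[fv[i]] = new List<int>();
                children[fv[i]].Add(i);
            }
            foreach (var kv in children)
                Console.WriteLine("parent {0,2}: children {1}",
                                  kv.Key, string.Join(", ", kv.Value));
        }
    }
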
Example No. 2
File: net4.cs Project: buguen/wsnpy
 public static void tst_find_schedule2()
 {
     int[] fv = new int[] {-1, 0, 1, 1, 0};
     double[] ps = new double[] {1, 0.8, 0.6, 0.2, 0.2};
     LossTree t = new LossTree(fv, ps, 8);
     G.VB = true;
     t.find_schedule(5, 3);
     foreach (Node n in t.nodes)
     {
         Console.WriteLine("Node {0}: q = {1}", n.ID, n.q);
     }
 }
Example No. 3
File: net4.cs Project: buguen/wsnpy
    // Generate multiple graphs of the different metrics as a function of
    // network size.  A separate set of plots is added for each value of
    // frac_source_min, and all of them are saved in a single file.
    public static void graphRateSize3(int tst_nr, int n_averages, int plot)
    {
        Console.WriteLine("Executing {0}({1:d2},{2:d6},{3})", G.current(),
                tst_nr, n_averages, plot);
        double tx_rg = 2;
        double[] xv = null;
        double[] yv = null;
        if (tst_nr == 0)
        {
            xv = new double[] {tx_rg, 2 * tx_rg};
            yv = new double[] {tx_rg,     tx_rg};
        }
        else if (tst_nr == 1)
        {
            xv = G.linspace(tx_rg, 5 * tx_rg, 5);
            yv = G.linspace(2 * tx_rg, 2.01 * tx_rg, 5);
        }
        if (xv.GetLength(0) != yv.GetLength(0))
        {
            throw new Exception("xv and yv should have equal length");
        }
        double rho = 15;
        int sched_lgth = 10;
        int n_tx_frames = 2000;
        double opt = 0;
        int[] types = new int[] {0, 1, 2, 3, 4, 5};
        double[] rate_v = G.linspace(0.5, 3, 28);
        int buffer_size = 30;
        double[,] tota = new double[xv.GetLength(0), types.Length];
        double[,] mean = new double[xv.GetLength(0), types.Length];
        double[,] pmin = new double[xv.GetLength(0), types.Length];
        double[,] ropt = new double[xv.GetLength(0), types.Length];
        Pgf g = new Pgf();
        string xlab = "normalized x size";
        double[] xn = new double[xv.Length];
        for (int q = 0; q < xn.Length; q++)
        {
            xn[q] = xv[q] / tx_rg;
        }
        for (double frac_source_min = 0.2; frac_source_min < 1;
             frac_source_min += 0.3)
        {
                for (int k = 0; k < n_averages; k++)
                {
                    Console.WriteLine("Repetition {0,4:D}. Total {1}",
                                k, G.elapsed());
                    for (int s = 0; s < xv.GetLength(0); s++)
                    {

                        Console.WriteLine("s={0,2}, x/t={1,4:F}.  Total {2}",
                                          s, xv[s] / tx_rg, G.elapsed());
                        int n = (int)(rho*xv[s]*yv[s]/Math.PI/tx_rg/tx_rg);
                        int source_min = (int) (frac_source_min * n);
                        G.rgen = new Random(k);
                        int[] fv = RandomTree.parents(n, xv[s], yv[s], tx_rg);
                        double[] ps = new double[n];
                        for (int u = 0; u < n; u++)
                        {
                            ps[u] = 0.5 + G.rgen.NextDouble() / 2;
                        }
                        for (int i = 0; i < types.Length; i++)
                        {
                            double m_tota = 0;
                            double m_mean = 0;
                            double m_pmin = 0;
                            double m_ropt = rate_v[0];
                            for (int j = 0; j < rate_v.Length; j++)
                            {
                                LossTree t = new LossTree(fv,ps,buffer_size);
                                if (types[i] == 3)
                                {
                                    t.find_schedule(sched_lgth, source_min);
                                }
                                int[] results = t.simulate_it(n_tx_frames,
                                                    rate_v[j], types[i], k);
                                double a_tota = 0;
                                double a_mean = 0;
                                double a_pmin = 0;
                                foreach (int h in results)
                                {
                                    a_tota += (double) h / n_tx_frames;
                                    a_mean += (double) h / results.Length;
                                    if (h < source_min)
                                    {
                                        a_pmin += (double)1 / results.Length;
                                    }
                                }
                                if (a_tota > m_tota)
                                {
                                    m_tota = a_tota;
                                    m_mean = a_mean;
                                    m_pmin = a_pmin;
                                    m_ropt = rate_v[j];
                                }
                            }
                            tota[s, i] += m_tota / n_averages;
                            mean[s, i] += m_mean / n_averages;
                            pmin[s, i] += m_pmin / n_averages;
                            ropt[s, i] += m_ropt / n_averages;
                        }
                    }
                }
                Console.WriteLine("**** Printing results *****");
                string[] legv = new string[] {"0", "1", "2",
                                              String.Format("3={0:F3}",opt),
                                              "4", "5" };
                string append = String.Format("frac-source-min = {0:f2}",
                                              frac_source_min);
                g.add(xlab, "total " + append);
                g.mplot(xn, tota, legv);
                g.add(xlab, "mean " + append);
                g.mplot(xn, mean, legv);
                g.add(xlab, "pmin " + append);
                g.mplot(xn, pmin, legv);
                g.add(xlab, "ropt " + append);
                g.mplot(xn, ropt, legv);
        }
        string filename = String.Format("{0}_{1:d2}_{2:d6}", G.current(),
                            tst_nr, n_averages);
        Console.WriteLine(filename);
        g.save(filename, plot);
    }
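
Throughout these examples the node count is derived from a density argument, n = floor(rho * x * y / (pi * tx_rg^2)), i.e. roughly rho nodes per disc of radius tx_rg. A quick sanity check of the numbers graphRateSize3 would use for tst_nr == 0 (the helper class below is illustrative only, not part of net4.cs):

    using System;

    static class DensityCheck
    {
        // Node count for a rho-density deployment on an x-by-y area,
        // normalized by the area of a disc of radius tx_rg.
        static int NodeCount(double rho, double x, double y, double tx_rg)
        {
            return (int)(rho * x * y / Math.PI / tx_rg / tx_rg);
        }

        static void Main()
        {
            double tx_rg = 2, rho = 15;  // values used in graphRateSize3
            // tst_nr == 0 uses the areas (tx_rg x tx_rg) and (2*tx_rg x tx_rg).
            Console.WriteLine(NodeCount(rho, tx_rg, tx_rg, tx_rg));      // 4
            Console.WriteLine(NodeCount(rho, 2 * tx_rg, tx_rg, tx_rg));  // 9
        }
    }
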
Example No. 4
File: net4.cs Project: buguen/wsnpy
 public static void tst_find_schedule()
 {
     double[] ps;
     int[] fv;
     ps = new double[] { 1, 0.6, 0.3, 0.3, 0.3, 0.3 };
     fv = new int[] { -1, 0, 1, 1, 1, 1 };
     int buffer_size = 20;
     LossTree t = new LossTree(fv, ps, buffer_size);
     t.find_schedule(10, 3);
     G.VB = true;
     t.show_schedule();
     G.VB = false;
     int[] types = new int[] { 0, 2};
     int n_tx_frames = 10000;
     foreach (int type in types)
     {
         int[] results = t.simulate_it(n_tx_frames, 0.6, type, 0);
         double sum = 0.0;
         foreach (int i in results)
         {
             sum += (double)i / n_tx_frames;
         }
         Console.WriteLine("Type {0}, sum = {1}", type, sum);
     }
 }
Example No. 5
File: net4.cs Project: buguen/wsnpy
 // This function computes the average metrics at different rates for
 // different topologies.  This is quite useless, because different
 // metrics have different operating points.  It unfairly shows poor
 // performance of the scheduled approach.  The only useful part may be
 // comparing the unscheduled approaches.
 public static void graphRateRandom(int tst_nr, int n_averages, int plot)
 {
     Console.WriteLine("Executing {0}({1:d2},{2:d6},{3})", G.current(),
             tst_nr, n_averages, plot);
     double tx_rg = 2;
     double x = 5 * tx_rg;
     double y = 5 * tx_rg;
     double rho = 9;
     int n = (int)(rho * x * y / Math.PI / tx_rg / tx_rg);
     int sched_lgth = 10;
     int n_tx_frames = 2000;
     double opt = 0;
     double[] rate_v = G.linspace(0.1, 1.5, 28);
     int source_min = 3;
     int buffer_size = 30;
     int[] types = new int[] {0, 1, 2, 3, 4};
     G.VB = false;
     double[,] tota = new double[rate_v.Length, types.Length];
     double[,] mean = new double[rate_v.Length, types.Length];
     double[,] pmin = new double[rate_v.Length, types.Length];
     for (int k = 0; k < n_averages; k++)
     {
         Console.WriteLine("Repetition {0,4:D}. Total {1}",
                     k, G.elapsed());
         G.rgen = new Random(k);
         int[] fv = RandomTree.parents(n, x, y, tx_rg);
         double[] ps = new double[n];
         for (int u = 0; u < n; u++)
         {
             ps[u] = 0.5 + G.rgen.NextDouble() / 2;
         }
         for (int j = 0; j < rate_v.Length; j++)
         {
             for (int i = 0; i < types.Length; i++)
             {
                 LossTree t = new LossTree(fv, ps, buffer_size);
                 if (types[i] == 3)
                 {
                     t.find_schedule(sched_lgth, source_min);
                     if (k == 0 && j == 0)
                     {
                         opt = ((double)t.count.Count / sched_lgth);
                     }
                 }
                 int[] results = t.simulate_it(n_tx_frames, rate_v[j],
                         types[i], k);
                 foreach (int h in results)
                 {
                     tota[j, i] += (double)h / n_averages / n_tx_frames;
                     mean[j, i] += (double)h / n_averages / results.Length;
                     if (h < source_min)
                     {
                         pmin[j, i] += (double)1 / n_averages /
                             results.Length;
                     }
                 }
             }
         }
     }
     Console.WriteLine("**** Printing results *****");
     string[] legv = new string[] { "0", "1", "2",
         String.Format("3={0:F3}", opt), "4" };
     Pgf g = new Pgf();
     g.add("rate", "total");
     g.mplot(rate_v, tota, legv);
     g.add("rate", "mean");
     g.mplot(rate_v, mean, legv);
     g.add("rate", "pmin");
     g.mplot(rate_v, pmin, legv);
     string filename = String.Format("{0}_{1:d2}_{2:d6}", G.current(),
             tst_nr, n_averages);
     g.save(filename, plot);
 }
Example No. 6
File: net4.cs Project: buguen/wsnpy
 public static void graphRate1(int tst_nr, int n_averages, int plot)
 {
     Console.WriteLine("Executing {0}({1:d2},{2:d6},{3}). Total {4}",
             G.current(), tst_nr, n_averages, plot, G.elapsed());
     int sched_lgth = 10;
     int[] fv;
     int n_tx_frames = 2000;
     double opt = 0;
     double[] ps;
     double[] rate_v = G.linspace(0.1, 1.5, 28);
     int source_min = 3;
     int buffer_size = 30;
     int[] types = new int[] {0, 1, 2, 3, 4, 5};
     G.VB = false;
     // Shows the advantage of the scheduled approach.
     if (tst_nr == 0) {
         fv = new int[] { -1, 0, 1, 1, 1, 1 };
         ps = new double[] { 1, 0.8, 0.4, 0.4, 0.4, 0.4 }; }
     // No advantage in the scheduled approach
     else if (tst_nr == 1) {
         fv = new int[] { -1, 0, 1, 1, 1, 1 };
         ps = new double[] { 1, 0.4, 0.4, 0.4, 0.4, 0.4 }; }
     // No advantage in scheduled approach
     else if (tst_nr == 2) {
         fv = new int[] { -1, 0, 1, 1, 1, 1 };
         ps = new double[] { 1, 0.4, 0.8, 0.8, 0.8, 0.8 }; }
     // Slight advantage of scheduled approach, all select/discard
     // policies are similar.
     else if (tst_nr == 3) {
         fv = new int[] { -1, 0, 1, 1, 1, 1 };
         ps = new double[] { 1, 0.6, 0.6, 0.6, 0.2, 0.2 }; }
     // Advantage of scheduled approach for low rate, great variety
     // between select/discard policies, but 0 is overall the best.
     else if (tst_nr == 4) {
         fv = new int[] { -1, 0, 1, 1, 1, 1 };
         ps = new double[] { 1, 0.3, 0.6, 0.6, 0.2, 0.2 }; }
     // No schedule advantage and moderate advantage of packet
     // selection.  Strong peak at the optimal.
     else if (tst_nr == 5) {
         fv = new int[] { -1, 0, 1, 2, 3, 4, 5, 5, 5, 5, 5, 5 };
         ps = new double[12];
         for (int i = 0; i < fv.Length; i++)
             ps[i] = 0.3;
     } // No schedule advantage and moderate advantage of packet selection.
     else if (tst_nr == 6) {
         fv = new int[] { -1, 0, 1, 2, 3, 4, 5 };
         ps = new double[fv.Length];
         for (int i = 0; i < ps.Length; i++)
             ps[i] = 0.3;
     }
     // Unsuccessful attempt to make the hybrid select/discard topology
     // work.
     else if (tst_nr == 7) {
         fv = new int[] {-1, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2};
         ps = new double[fv.Length];
         for (int i = 0; i < ps.Length; i++)
             ps[i] = 0.8;
     }
     else if (tst_nr == 8) {
         fv = new int[] {-1, 0, 0, 1, 1, 1, 2, 2, 2};
         ps = new double[fv.Length];
         for (int i = 0; i < ps.Length; i++)
             ps[i] = 0.8;
     }
     else if (tst_nr == 9) {
         fv = new int[] {-1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 9, 10, 11, 12};
         ps = new double[fv.Length];
         for (int i = 0; i < ps.Length; i++) {
             if (i < 9)
                 ps[i] = 0.8;
             else
                 ps[i] = 0.4;
         }
     }
     // These parameters are chosen to show the advantage of the
     // scheduled approach vs the unscheduled approach.
     else if (tst_nr == 10) {
         fv = new int[] {-1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 9, 10, 11, 12};
         ps = new double[fv.Length];
         for (int i = 0; i < ps.Length; i++) {
             if (i < 9)
                 ps[i] = 0.4;
             else
                 ps[i] = 0.8;
         }
     }
     else if (tst_nr == 11) {
         fv = new int[] {-1, 0, 1, 1, 1, 0, 5, 6, 7, 8};
         ps = new double[fv.Length];
         for (int i = 0; i < ps.Length; i++) {
             if (i < 5) ps[i] = 0.4;
             else ps[i] = 0.8;
         }
     }
     // These parameters were chosen to show that the optimal rate is
     // hard to determine.  In this case, the total is higher for 0.83,
     // despite the fact that it means that node 1 has spare capacity
     // that it could be using to transmit more packets.
     else if (tst_nr == 12) {
         fv = new int[] {-1, 0, 1, 1, 1, 1, 1};
         ps = new double[] {1, 0.88, 0.83, 0.83, 0.83, 0.83, 0.83}; }
     // These parameters were chosen to show the usefulness of the
     // scheduling algorithm in a small network.
     else if (tst_nr == 13) {
         fv = new int[] {-1, 0, 1, 1, 0};
         ps = new double[] {1, 0.8, 0.6, 0.2, 0.2}; }
     // These parameters show that under a linear topology with all links
     // equally good, all select/discard policies perform very similarly.
     else if (tst_nr == 14) {
         fv = new int[] {-1, 0, 1, 2, 3, 4, 5};
         ps = new double[fv.Length];
         for (int i = 0; i < fv.Length; i++)
             ps[i] = 0.5;
     }
     // These parameters show a linear topology whose last node has many
     // children.  This is a good example of a situation in which the
     // select/discard type 2 performs much better than the other types.
     else if (tst_nr == 15) {
         fv = new int[] { -1, 0, 1, 2, 3, 4, 5, 5, 5, 5, 5, 5 };
         ps = new double[12];
         for (int i = 0; i < fv.Length; i++)
         {
             if (i < 6)
                 ps[i] = 0.3;
             else
                 ps[i] = 1;
         }
     }
     // These parameters show that under a linear topology with unequal
     // link qualities, the priority traffic may perform best.
     else if (tst_nr == 16) {
         fv = new int[] {-1, 0, 1, 2, 3, 4, 5, 6, 7};
         ps = new double[fv.Length];
         for (int i = 0; i < fv.Length; i++) {
             if (i < 4)
                 ps[i] = 0.4;
             else
                 ps[i] = 1;
         }
     }
     else if (tst_nr == 17) {
         fv = new int[] {-1, 0, 1, 1};
         ps = new double[] {1, 0.8, 0.4, 0.4};
         source_min = 2;
     }
     else if (tst_nr == 18) {
         fv = new int[] {-1, 0, 1, 1, 0};
         ps = new double[] {1, 0.8, 0.6, 0.2, 0.2};
         source_min = 2;
     }
     else if (tst_nr == 19) {
         fv = new int[] {-1, 0, 1, 1, 0};
         ps = new double[] {1, 0.66, 0.49, 0.16, 0.16};
         source_min = 2;
         sched_lgth = 5;
     }
     else throw new ArgumentException("Inappropriate tst_nr");
     PltGlb.plot_logical3(fv, ps, plot);
     double[,] tota = new double[rate_v.Length, types.Length];
     double[,] mean = new double[rate_v.Length, types.Length];
     double[,] pmin = new double[rate_v.Length, types.Length];
     for (int k = 0; k < n_averages; k++) {
         Console.WriteLine("Repetition {0} of {1}({2:d2},{3:d6},{4}) {5}",
                 k, G.current(), tst_nr, n_averages, plot, G.elapsed());
         for (int j = 0; j < rate_v.Length; j++) {
             for (int i = 0; i < types.Length; i++) {
                 LossTree t = new LossTree(fv, ps, buffer_size);
                 if (types[i] == 3) {
                     t.find_schedule(sched_lgth, source_min);
                     if (k == 0 && j == 0)
                         opt = ((double)t.count.Count / sched_lgth);
                 }
                 int[] results = t.simulate_it(n_tx_frames, rate_v[j],
                         types[i], k);
                 foreach (int h in results) {
                     tota[j, i] += (double)h / n_averages / n_tx_frames;
                     mean[j, i] += (double)h / n_averages / results.Length;
                     if (h < source_min)
                         pmin[j, i] += (double)1 / n_averages /
                             results.Length;
                 }
             }
         }
     }
     Console.WriteLine("**** Printing results *****");
     string[] legv = new string[] { "0", "1", "2",
                    String.Format("3={0:F3}", opt), "4", "5" };
     Pgf g = new Pgf();
     g.add("rate", "total");
     g.mplot(rate_v, tota, legv);
     g.add("rate", "mean");
     g.mplot(rate_v, mean, legv);
     g.add("rate", "pmin");
     g.mplot(rate_v, pmin, legv);
     g.add("missing data tolerance", "rate efficiency");
     g.implot(rate_v, pmin, legv);
     g.extra_body.Add("\n\\includegraphics[scale=0.4]{ztree.pdf}\n");
     string filename = String.Format("{0}_{1:d2}_{2:d6}", G.current(),
             tst_nr, n_averages);
     g.save(filename, plot);
 }
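
graphRate1 accepts tst_nr values 0 through 19 and throws an ArgumentException otherwise, so a batch run over every predefined topology might look like the sketch below; the n_averages and plot arguments are placeholders, not values taken from the project.

    // Hypothetical driver, assumed to live in the same class as graphRate1.
    public static void run_all_graphRate1()
    {
        for (int tst_nr = 0; tst_nr <= 19; tst_nr++)
        {
            // 10 repetitions per topology; the plot argument is passed
            // through to Pgf.save, here simply set to 0 as a placeholder.
            graphRate1(tst_nr, 10, 0);
        }
    }
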
Example No. 7
File: net4.cs Project: buguen/wsnpy
 // 438 seconds per iteration in ee-moda
 public static void averSource(int tst_nr, int n_averages, int plot)
 {
     Console.WriteLine("Executing {0}({1:d2},{2:d6},{3})", G.current(),
             tst_nr, n_averages, plot);
     double tx_rg = 2;
     double x = 3 * tx_rg;
     double y = 3 * tx_rg;
     double rho = 9;
     int n = (int)(rho * x * y / Math.PI / tx_rg / tx_rg);
     int sched_lgth = 20;
     int n_tx_frames = 5000;
     double infid_thresh = 0.15;
     // Parameters for the all-transmit-in-all approach: number of blocks
     // and packets per block.
     int blocks = 200;
     int n_packets = 8;
     int[] source_min_v = new int[] {3, 5, 8, 11, 14, 17, 20};
     Console.WriteLine("Simulating {0:d} nodes", n);
     // Number of tree reconfiguration cycles used to balance the energy
     // consumption.
     int n_tree_reconf = 5;
     int buffer_size = 30;
     int[] types = new int[] {0, 3, 9};
     G.VB = false;
     G.rx_consum = 0.001;
     double[,] consum_mean = new double[source_min_v.Length, 3];
     double[,] consum_median = new double[source_min_v.Length, 3];
     double[,] consum_max = new double[source_min_v.Length, 3];
     for (int k = 0; k < n_averages; k++)
     {
         Console.WriteLine("Repetition {0,4:D}. Total {1}", k,
                 G.elapsed());
         Console.WriteLine("                    Current time is {0}",
                 DateTime.Now.ToString("u"));
         G.rgen = new Random(k);
         AverTree at = new AverTree(n, x, y, tx_rg);
         for (int a = 0; a < source_min_v.Length; a++)
         {
             for (int d = 0; d < types.Length; d++)
             {
                 double[] tot_consum1 = new double[n];
                 G.rgen = new Random(k);
                 if (types[d] == 9)
                 {
                     for (int h = 0; h < n_tree_reconf; h++)
                     {
                         at.get_tree(tot_consum1);
                         LossTree e = new LossTree(at.fv, at.ps,
                                 buffer_size);
                         e.simulate_it2(blocks, n_packets, h);
                         for (int i = 0; i < n; i++)
                             tot_consum1[i] += e.nodes[i].consum /
                                 n_tree_reconf;
                     }
                 }
                 else
                 {
                     for (int h = 0; h < n_tree_reconf; h++)
                     {
                         double[] consum_old = new double[n];
                          for (double rate = 0.1; ; rate *= 1.05)
                         {
                             at.get_tree(tot_consum1);
                             LossTree t = new LossTree(at.fv, at.ps,
                                     buffer_size);
                             t.find_schedule(sched_lgth, source_min_v[a]);
                             // This is supposed to show the optimal rate.
                             // Console.WriteLine(((double)t.count.Count /
                             // sched_lgth));
                             int [] results = t.simulate_it(n_tx_frames,
                                     rate, types[d], h);
                             // Fraction of reporting intervals with
                             // insufficient count
                             double infid_ratio = 0.0;
                             foreach (int m in results)
                             {
                                 if (m < source_min_v[a])
                                      infid_ratio +=
                                          1.0 / (double)results.Length;
                             }
                             if (infid_ratio < infid_thresh)
                             {
                                 // This rate yields sufficiently low
                                 // infid_ratio.  Record the consumption in
                                 // case this is the last rate to yield
                                 // sufficiently low infid_ratio.
                                 for (int q = 0; q < n; q++)
                                     consum_old[q] = t.nodes[q].consum;
                             }
                             else
                             {
                                  // This rate is the smallest rate that is
                                  // too high, so record the consumption of
                                  // the previous iteration in the permanent
                                  // variable.
                                  for (int r = 0; r < n; r++)
                                      tot_consum1[r] += consum_old[r] /
                                          n_tree_reconf;
                                 break;
                             }
                         }
                     }
                 }
                 consum_mean[a,d] += G.Mean(tot_consum1) / n_averages;
                 consum_median[a,d] += G.Median(tot_consum1) / n_averages;
                 consum_max[a,d] += G.Max(tot_consum1) / n_averages;
             }
         }
     }
     string[] legv = new string[] {"0", "3", "9"};
     Pgf g = new Pgf();
     g.extra_body.Add(String.Format("\n\nThe number of nodes is {0:d}",
                 n));
     for (int i = 0; i < 2; i++)
     {
         string xaxis = "source.min";
         double[] xvec = new double[source_min_v.Length];
         if (i == 0)
             for (int q = 0; q < source_min_v.Length; q++)
                 xvec[q] = (double) source_min_v[q];
         else if (i == 1)
         {
             for (int q = 0; q < source_min_v.Length; q++)
                 xvec[q] = 100 * source_min_v[q] / (double) (n-1);
             xaxis = "percent source.min";
         }
         g.add(xaxis, "consum-mean");
         g.mplot(xvec, consum_mean, legv);
         g.add(xaxis, "consum-median");
         g.mplot(xvec, consum_median, legv);
         g.add(xaxis, "consum-max");
         g.mplot(xvec, consum_max, legv);
         double[,] gain_mean = new double [source_min_v.Length, 2];
         double[,] gain_median = new double [source_min_v.Length, 2];
         double[,] gain_max = new double [source_min_v.Length, 2];
         for (int q = 0; q < source_min_v.Length; q++)
             for (int s = 0; s < 2; s++)
             {
                 gain_mean[q, s] = 100 * (consum_mean[q, 2] -
                         consum_mean[q, s]) / consum_mean[q, 2];
                 gain_median[q, s] = 100 * (consum_median[q, 2] -
                         consum_median[q, s]) / consum_median[q, 2];
                 gain_max[q, s] = 100 * (consum_max[q, 2] -
                         consum_max[q, s]) / consum_max[q, 2];
             }
         string[] legv2 = new string[] {"0", "3"};
         g.add(xaxis, "gain-mean");
         g.mplot(xvec, gain_mean, legv2);
         g.add(xaxis, "gain-median");
         g.mplot(xvec, gain_median, legv2);
         g.add(xaxis, "gain-max");
         g.mplot(xvec, gain_max, legv2);
     }
     string filename = String.Format("{0}_{1:d2}_{2:d6}", G.current(),
                                     tst_nr, n_averages);
     g.save(filename, plot);
     //Console.WriteLine("consum_mean   = {0,8:F3}   {1,8:F3}   {2,8:F3}",
     //        consum_mean[0], consum_mean[1], consum_mean[2]);
     //Console.WriteLine("consum_median = {0,8:F3}   {1,8:F3}   {2,8:F3}",
     //        consum_median[0], consum_median[1], consum_median[2]);
     //Console.WriteLine("consum_max    = {0,8:F3}   {1,8:F3}   {2,8:F3}",
     //        consum_max[0], consum_max[1], consum_max[2]);
 }
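
The innermost loop of averSource (and of averSize and averRxConsum below) is a geometric search for the highest sustainable rate: the rate grows by a factor of 1.05 each step, and the loop stops at the first rate whose infidelity ratio reaches infid_thresh, keeping the consumption recorded at the previous rate. A simplified standalone sketch of that pattern, with a made-up feasibility test standing in for the LossTree simulation:

    using System;

    static class RateSearchSketch
    {
        // Largest rate of the form start * growth^k that still passes the
        // feasibility test, mirroring the loop structure in averSource.
        static double FindMaxFeasibleRate(Func<double, bool> feasible,
                                          double start = 0.1,
                                          double growth = 1.05)
        {
            // Like consum_old above, lastGood remembers the previous step;
            // if even the first rate fails, the start value is returned as-is.
            double lastGood = start;
            for (double rate = start; ; rate *= growth)
            {
                if (feasible(rate))
                    lastGood = rate;
                else
                    return lastGood;   // first rate that is too high: stop
            }
        }

        static void Main()
        {
            // Made-up feasibility test: rates up to 0.8 are sustainable.
            Console.WriteLine(FindMaxFeasibleRate(rate => rate <= 0.8));
        }
    }
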
Example No. 8
File: net4.cs Project: buguen/wsnpy
 // Vary the network size while keeping the node density and the fraction
 // of source nodes constant.
 // 1044s per iteration at ee-modalap
 public static void averSize(int tst_nr, int n_averages, int plot)
 {
     Console.WriteLine("Executing {0}({1:d2},{2:d6},{3})", G.current(),
             tst_nr, n_averages, plot);
     double tx_rg = 2;
     double x = 2 * tx_rg;
     double[] y_v_n = new double[] {2, 3};// y vector normalized by tx_rg
     int[] types = new int[] {0, 3, 9};
     if (tst_nr == 1)
     {
         y_v_n = new double[] {2, 3, 4, 5, 6};
         types = new int[] {0, 1, 3, 4, 5, 9};
     }
     double[] y_v = new double[y_v_n.Length];
     for (int i = 0; i < y_v.Length; i++)
         y_v[i] = y_v_n[i] * tx_rg;
     double rho = 10;
     int sched_lgth = 20;
     int n_tx_frames = 5000;
     double infid_thresh = 0.15;
     // Parameters for the all-transmit-in-all approach: number of blocks
     // and packets per block.
     int blocks = 200;
     int n_packets = 8;
     // Number of tree reconfiguration cycles used to balance the energy
     // consumption.
     int n_tree_reconf = 5;
     int buffer_size = 30;
     G.VB = false;
     G.rx_consum = 0.001;
     double[,] consum_mean = new double[y_v.Length, types.Length];
     double[,] consum_median = new double[y_v.Length, types.Length];
     double[,] consum_max = new double[y_v.Length, types.Length];
     double[,] rate_min_v = new double[y_v.Length, types.Length - 1];
     for (int k = 0; k < n_averages; k++) {
         Console.WriteLine("Repetition {0,4:D}. Total {1}", k,
                 G.elapsed());
         for (int a = 0; a < y_v.Length; a++) {
             int n = (int)(rho * x * y_v[a] / Math.PI / tx_rg / tx_rg);
             int source_min = (int) (0.4 * (double) n);
             Console.WriteLine("Simulating {0:d} nodes", n);
             G.rgen = new Random(k);
             AverTree at = new AverTree(n, x, y_v[a], tx_rg);
             for (int d = 0; d < types.Length; d++) {
                 double[] tot_consum1 = new double[n];
                 G.rgen = new Random(k);
                 if (types[d] == 9) {
                     for (int h = 0; h < n_tree_reconf; h++) {
                         at.get_tree(tot_consum1);
                         LossTree e = new LossTree(at.fv, at.ps,
                                 buffer_size);
                         e.simulate_it2(blocks, n_packets, h);
                         for (int i = 0; i < n; i++)
                             tot_consum1[i] += e.nodes[i].consum /
                                 n_tree_reconf;
                     }
                 }
                 else {
                     for (int h = 0; h < n_tree_reconf; h++) {
                         double[] consum_old = new double[n];
                         double expon = 1.05;
                          for (double rate = 0.1; ; rate *= expon) {
                             at.get_tree(tot_consum1);
                             LossTree t = new LossTree(at.fv, at.ps,
                                     buffer_size);
                             t.find_schedule(sched_lgth, source_min);
                             // This is supposed to show the optimal rate.
                             // Console.WriteLine(((double)t.count.Count /
                             // sched_lgth));
                             int [] results = t.simulate_it(n_tx_frames,
                                     rate, types[d], h);
                             // Fraction of reporting intervals with
                             // insufficient count
                             double infid_ratio = 0.0;
                             foreach (int m in results)
                                 if (m < source_min)
                                     infid_ratio +=
                                         1.0/(double)results.Length;
                             // This rate yields sufficiently low
                             // infid_ratio.  Record the consumption in
                             // case this is the last rate to yield
                             // sufficiently low infid_ratio.
                             if (infid_ratio < infid_thresh)
                                 for (int q = 0; q < n; q++)
                                     consum_old[q] = t.nodes[q].consum;
                             else {
                                  // This rate is the smallest rate that is
                                  // too high, so record the consumption of
                                  // the previous iteration in the permanent
                                  // variables.
                                  rate_min_v[a, d] += rate / expon /
                                      n_tree_reconf / n_averages;
                                  for (int r = 0; r < n; r++)
                                      tot_consum1[r] += consum_old[r] /
                                          n_tree_reconf;
                                 break;
                             }
                         }
                     }
                 }
                 consum_mean[a,d] += G.Mean(tot_consum1) / n_averages;
                 consum_median[a,d] += G.Median(tot_consum1) / n_averages;
                 consum_max[a,d] += G.Max(tot_consum1) / n_averages;
             }
         }
     }
     string[] legv = new string[types.Length];
     for (int i = 0; i < types.Length; i++)
         legv[i] = types[i].ToString();
     Pgf g = new Pgf();
     string xaxis = "normalized y-v";
     g.add(xaxis, "consum-mean");
     g.mplot(y_v_n, consum_mean, legv);
     g.add(xaxis, "consum-median");
     g.mplot(y_v_n, consum_median, legv);
     g.add(xaxis, "consum-max");
     g.mplot(y_v_n, consum_max, legv);
     double[,] gain_mean = new double [y_v_n.Length, types.Length - 1];
     double[,] gain_median = new double [y_v_n.Length, types.Length -1];
     double[,] gain_max = new double [y_v_n.Length, types.Length - 1];
     int refz = types.Length - 1; // Column used as a benchmark
     for (int q = 0; q < y_v_n.Length; q++)
         for (int s = 0; s < types.Length - 1; s++)
         {
             gain_mean[q, s] = 100 * (consum_mean[q, refz] -
                     consum_mean[q, s]) / consum_mean[q, refz];
             gain_median[q, s] = 100 * (consum_median[q, refz] -
                     consum_median[q, s]) / consum_median[q, refz];
             gain_max[q, s] = 100 * (consum_max[q, refz] -
                     consum_max[q, s]) / consum_max[q, refz];
         }
     string[] legv2 = new string[types.Length - 1];
     for (int i = 0; i < types.Length - 1; i++)
             legv2[i] = types[i].ToString();
     g.add(xaxis, "normalized load");
     g.mplot(y_v_n, rate_min_v, legv2);
     g.add(xaxis, "gain-mean");
     g.mplot(y_v_n, gain_mean, legv2);
     g.add(xaxis, "gain-median");
     g.mplot(y_v_n, gain_median, legv2);
     g.add(xaxis, "gain-max");
     g.mplot(y_v_n, gain_max, legv2);
     g.add(xaxis, "max-rate");
     g.mplot(y_v_n, rate_min_v, legv2);
     string filename = String.Format("{0}_{1:d2}_{2:d6}", G.current(),
             tst_nr, n_averages);
     g.save(filename, plot);
     //Console.WriteLine("consum_mean   = {0,8:F3}   {1,8:F3}   {2,8:F3}",
     //        consum_mean[0], consum_mean[1], consum_mean[2]);
     //Console.WriteLine("consum_median = {0,8:F3}   {1,8:F3}   {2,8:F3}",
     //        consum_median[0], consum_median[1], consum_median[2]);
     //Console.WriteLine("consum_max    = {0,8:F3}   {1,8:F3}   {2,8:F3}",
     //        consum_max[0], consum_max[1], consum_max[2]);
 }
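
The gain arrays in averSize (and in averSource above) express each scheme's consumption as a percentage saving relative to the benchmark column, here the last entry of types, i.e. type 9: gain = 100 * (reference - value) / reference. A tiny illustrative helper (not part of the project) for that formula:

    using System;

    static class GainSketch
    {
        // Percentage saving of 'value' relative to the benchmark 'reference',
        // as used for gain_mean / gain_median / gain_max above.
        static double Gain(double reference, double value)
        {
            return 100 * (reference - value) / reference;
        }

        static void Main()
        {
            // e.g. a scheduled scheme consuming 0.6 units against a benchmark
            // of 1.0 corresponds to a 40 percent gain.
            Console.WriteLine(Gain(1.0, 0.6));   // 40
        }
    }
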
Example No. 9
File: net3.cs Project: buguen/wsnpy
 public static void averRxConsum(int tst_nr, int n_averages, int plot)
 {
     Console.WriteLine("Executing {0}({1:d2},{2:d6},{3})", G.current(),
             tst_nr, n_averages, plot);
     double tx_rg = 2;
     double x = 3 * tx_rg;
     double y = 3 * tx_rg;
     double rho = 9;
     int n = (int)(rho * x * y / Math.PI / tx_rg / tx_rg);
     int sched_lgth = 20;
     int n_tx_frames = 5000;
     double infid_thresh = 0.15;
     // Parameters for the all-transmit-in-all approach: number of blocks
     // and packets per block.
     int blocks = 200;
     int n_packets = 8;
     int source_min = 1;
     if (tst_nr == 0)
         source_min = 3;//(int) (n * 0.2);
     else if (tst_nr == 1)
         source_min = 5;
     else if (tst_nr == 2)
         source_min = 8;
     else
         throw new Exception("Invalid tst_nr");
     Console.WriteLine("source_min = {0:d}", source_min);
     Console.WriteLine("Simulating {0:d} nodes", n);
     // Number of tree reconfiguration cycles used to balance the energy
     // consumption.
     int n_tree_reconf = 5;
     double [] tx_factor_v = new double[] {0, 4, 8, 12, 16};
     double [] rx_consum_v = new double[tx_factor_v.Length];
     for (int i = 0; i < tx_factor_v.Length; i++)
     {
         rx_consum_v[i] = Math.Pow(10, - tx_factor_v[i] / 10);
         Console.WriteLine(rx_consum_v[i]);
     }
     int buffer_size = 30;
     int[] types = new int[] {0, 3, 9};
     G.VB = false;
     double[,] consum_mean = new double[rx_consum_v.Length, 3];
     double[,] consum_median = new double[rx_consum_v.Length, 3];
     double[,] consum_max = new double[rx_consum_v.Length, 3];
     for (int k = 0; k < n_averages; k++)
     {
         G.rgen = new Random(k);
         AverTree at = new AverTree(n, x, y, tx_rg);
         for (int a = 0; a < rx_consum_v.Length; a++)
         {
             G.rx_consum = rx_consum_v[a];
             for (int d = 0; d < types.Length; d++)
             {
                 double[] tot_consum1 = new double[n];
                 G.rgen = new Random(k);
                 if (types[d] == 9)
                 {
                     for (int h = 0; h < n_tree_reconf; h++)
                     {
                         at.get_tree(tot_consum1);
                         LossTree e = new LossTree(at.fv, at.ps,
                                 buffer_size);
                         e.simulate_it2(blocks, n_packets, h);
                         for (int i = 0; i < n; i++)
                             tot_consum1[i] += e.nodes[i].consum /
                                 n_tree_reconf;
                     }
                 }
                 else
                 {
                     for (int h = 0; h < n_tree_reconf; h++)
                     {
                         double[] consum_old = new double[n];
                          for (double rate = 0.1; ; rate *= 1.05)
                         {
                             at.get_tree(tot_consum1);
                             LossTree t = new LossTree(at.fv, at.ps,
                                     buffer_size);
                             t.find_schedule(sched_lgth, source_min, false);
                             // This is supposed to show the optimal rate.
                             // Console.WriteLine(((double)t.count.Count /
                             // sched_lgth));
                             int [] results = t.simulate_it(n_tx_frames,
                                     rate, types[d], h);
                             // Fraction of reporting intervals with
                             // insufficient count
                             double infid_ratio = 0.0;
                             foreach (int m in results)
                             {
                                 if (m < source_min)
                                      infid_ratio +=
                                          1.0 / (double)results.Length;
                             }
                             if (infid_ratio < infid_thresh)
                             {
                                 // This rate yields sufficiently low
                                 // infid_ratio.  Record the consumption in
                                 // case this is the last rate to yield
                                 // sufficiently low infid_ratio.
                                 for (int q = 0; q < n; q++)
                                     consum_old[q] = t.nodes[q].consum;
                             }
                             else
                             {
                                  // This rate is the smallest rate that is
                                  // too high, so record the consumption of
                                  // the previous iteration in the permanent
                                  // variable.
                                  for (int r = 0; r < n; r++)
                                      tot_consum1[r] += consum_old[r] /
                                          n_tree_reconf;
                                 break;
                             }
                         }
                     }
                 }
                 consum_mean[a,d] += G.Mean(tot_consum1) / n_averages;
                 consum_median[a,d] += G.Median(tot_consum1) / n_averages;
                 consum_max[a,d] += G.Max(tot_consum1) / n_averages;
             }
         }
     }
     string[] legv = new string[] {"0", "3", "9"};
     Pgf g = new Pgf();
     string xaxis = "$10\\,\\mathrm{log}_{10}(P_{tx}/P_{rx})$";
     g.add(xaxis, "consum-mean");
     g.mplot(tx_factor_v, consum_mean, legv);
     g.add(xaxis, "consum-median");
     g.mplot(tx_factor_v, consum_median, legv);
     g.add(xaxis, "consum-max");
     g.mplot(tx_factor_v, consum_max, legv);
     double[,] gain_mean = new double [rx_consum_v.Length, 2];
     double[,] gain_median = new double [rx_consum_v.Length, 2];
     double[,] gain_max = new double [rx_consum_v.Length, 2];
     for (int q = 0; q < rx_consum_v.Length; q++)
         for (int s = 0; s < 2; s++)
         {
             gain_mean[q, s] = 100 * (consum_mean[q, 2] -
                     consum_mean[q, s]) / consum_mean[q, 2];
             gain_median[q, s] = 100 * (consum_median[q, 2] -
                     consum_median[q, s]) / consum_median[q, 2];
             gain_max[q, s] = 100 * (consum_max[q, 2] -
                     consum_max[q, s]) / consum_max[q, 2];
         }
     string[] legv2 = new string[] {"0", "3"};
     g.add(xaxis, "gain-mean");
     g.mplot(tx_factor_v, gain_mean, legv2);
     g.add(xaxis, "gain-median");
     g.mplot(tx_factor_v, gain_median, legv2);
     g.add(xaxis, "gain-max");
     g.mplot(tx_factor_v, gain_max, legv2);
     string filename = String.Format("{0}_{1:d2}_{2:d6}", G.current(),
             tst_nr, n_averages);
     g.save(filename, plot);
     //Console.WriteLine("consum_mean   = {0,8:F3}   {1,8:F3}   {2,8:F3}",
     //        consum_mean[0], consum_mean[1], consum_mean[2]);
     //Console.WriteLine("consum_median = {0,8:F3}   {1,8:F3}   {2,8:F3}",
     //        consum_median[0], consum_median[1], consum_median[2]);
     //Console.WriteLine("consum_max    = {0,8:F3}   {1,8:F3}   {2,8:F3}",
     //        consum_max[0], consum_max[1], consum_max[2]);
 }
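
For reference, the x-axis values tx_factor_v in averRxConsum are dB ratios that are converted to a linear receive consumption via rx_consum = 10^(-tx_factor/10), so the five points {0, 4, 8, 12, 16} dB correspond roughly to {1.0, 0.398, 0.158, 0.063, 0.025}. A minimal, standalone check of that conversion (illustrative only):

    using System;

    static class RxConsumCheck
    {
        static void Main()
        {
            // Same mapping as in averRxConsum: dB ratio -> linear rx consumption.
            double[] tx_factor_v = new double[] { 0, 4, 8, 12, 16 };
            foreach (double db in tx_factor_v)
                Console.WriteLine("{0,2} dB -> rx_consum = {1:F4}",
                                  db, Math.Pow(10, -db / 10));
            // Prints 1.0000, 0.3981, 0.1585, 0.0631, 0.0251.
        }
    }
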