Example no. 1
        /// <summary>
        /// Write the anonymized file and mapping from original data read from standard input.
        /// </summary>
        /// <param name="linked_col">Linked column in each line of the standard input.</param>
        /// <param name="seed">Seed.</param>
        public void write_from_console(int linked_col, int seed)
        {
            // We don't know the number of rows in advance. Assume at most 10 million
            // and throw an error if more incoming lines than this arrive.
            n_rows = 10000000;
            var ks = new KnuthShuffle();

            rand_perm = ks.get_random_permutation(n_rows, seed);
            // Console.OpenStandardInput() returns a plain Stream; no FileStream cast is needed.
            Stream console_stream = Console.OpenStandardInput();
            TextReader tr         = new StreamReader(console_stream);

            write_anonymized_file(tr, "generated.csv", linked_col);
            tr.Close();
        }
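
The KnuthShuffle class used above is not included in these snippets. A minimal sketch of what it might look like, assuming a Fisher–Yates (Knuth) shuffle over the indices 0..n-1 driven by a seeded System.Random (class name and method signature are taken from the call sites, not from the original source), is:

        using System;

        // Hypothetical sketch: the real KnuthShuffle implementation is not shown
        // in these examples, so the details below are assumptions.
        public class KnuthShuffle
        {
            // Returns a random permutation of the indices 0..n-1,
            // reproducible for a given seed.
            public int[] get_random_permutation(int n, int seed)
            {
                var rng = new Random(seed);
                int[] perm = new int[n];

                for (int i = 0; i < n; i++)
                {
                    perm[i] = i;
                }

                // Fisher–Yates: walk backwards and swap each element
                // with a randomly chosen element at or before it.
                for (int i = n - 1; i > 0; i--)
                {
                    int j = rng.Next(i + 1);   // 0 <= j <= i
                    int tmp = perm[i];
                    perm[i] = perm[j];
                    perm[j] = tmp;
                }

                return perm;
            }
        }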
Example no. 2
        /// <summary>
        /// Write the anonymized files and mapping from original data to anonymized values.
        /// </summary>
        /// <param name="input_file_names">File names.</param>
        /// <param name="linked_cols">Linked columns.</param>
        /// <param name="seed">Seed.</param>
        public void write_from_files(string[] input_file_names, int[] linked_cols, int seed)
        {
            n_rows = 0;
            int n_files = input_file_names.Length;

            for (int i = 0; i < n_files; i++)
            {
                n_rows += countLineBreaksInFile(input_file_names[i]);
            }

            var ks = new KnuthShuffle();

            rand_perm = ks.get_random_permutation(n_rows, seed);

            for (int i = 0; i < n_files; i++)
            {
                TextReader tr = new StreamReader(input_file_names[i]);
                write_anonymized_file(tr, input_file_names[i], linked_cols[i]);
                tr.Close();
            }

            write_map("anonymizing_map.csv");
        }
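
The helpers countLineBreaksInFile, write_anonymized_file, and write_map are likewise not shown. As an illustration only, a line-counting helper consistent with the call site above could be sketched as follows (the name and signature are inferred from how it is used; the real implementation may differ):

        // Hypothetical sketch, inferred from the call in write_from_files:
        // counts the lines in one input file so that n_rows covers all inputs.
        // Requires System.IO for StreamReader.
        private int countLineBreaksInFile(string file_name)
        {
            int count = 0;

            using (var reader = new StreamReader(file_name))
            {
                // ReadLine returns null at end of stream.
                while (reader.ReadLine() != null)
                {
                    count++;
                }
            }

            return count;
        }

Note that write_from_files sums the line counts of every input file before generating a single permutation, so rand_perm spans all rows across all files.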