/// <summary>
/// Runs the given operation against the configured file system and verifies
/// it produced output; when checkOk is set, also verifies the output contains
/// no failure measurements and has success, op-count and ok-time measurements.
/// </summary>
/// <exception cref="System.Exception"/>
private void RunOperationOk(ConfigExtractor cfg, Operation op, bool checkOk)
{
    FileSystem fs = FileSystem.Get(cfg.GetConfig());
    IList<OperationOutput> outputs = op.Run(fs);
    // every run must report at least one measurement
    NUnit.Framework.Assert.IsTrue(!outputs.IsEmpty());
    if (!checkOk)
    {
        return;
    }
    bool sawSuccess = false;
    bool sawOpCount = false;
    bool sawTime = false;
    foreach (OperationOutput output in outputs)
    {
        var measurement = output.GetMeasurementType();
        // a successful run must never emit a failure measurement
        NUnit.Framework.Assert.IsTrue(!measurement.Equals(ReportWriter.Failures));
        if (measurement.Equals(ReportWriter.Successes))
        {
            sawSuccess = true;
        }
        if (measurement.Equals(ReportWriter.OpCount))
        {
            sawOpCount = true;
        }
        if (measurement.Equals(ReportWriter.OkTimeTaken))
        {
            sawTime = true;
        }
    }
    NUnit.Framework.Assert.IsTrue(sawSuccess);
    NUnit.Framework.Assert.IsTrue(sawOpCount);
    NUnit.Framework.Assert.IsTrue(sawTime);
}
/// <summary>
/// Verifies that every option parsed from the test arguments lands in the
/// extractor with the expected value (counts, sizes, ranges, result file,
/// duration).
/// </summary>
public virtual void TestArguments()
{
    ConfigExtractor config = GetTestConfig(true);
    NUnit.Framework.Assert.AreEqual(config.GetOpCount(), Constants.OperationType.Values().Length);
    NUnit.Framework.Assert.AreEqual(config.GetMapAmount(), 2);
    NUnit.Framework.Assert.AreEqual(config.GetReducerAmount(), 2);
    // append range: [1MB, 2MB]
    Range<long> appendRange = config.GetAppendSize();
    NUnit.Framework.Assert.AreEqual(appendRange.GetLower(), Constants.Megabytes * 1);
    NUnit.Framework.Assert.AreEqual(appendRange.GetUpper(), Constants.Megabytes * 2);
    // write range: [1MB, 2MB]
    Range<long> writeRange = config.GetWriteSize();
    NUnit.Framework.Assert.AreEqual(writeRange.GetLower(), Constants.Megabytes * 1);
    NUnit.Framework.Assert.AreEqual(writeRange.GetUpper(), Constants.Megabytes * 2);
    // truncate range: [0, 1MB]
    Range<long> truncateRange = config.GetTruncateSize();
    NUnit.Framework.Assert.AreEqual(truncateRange.GetLower(), 0);
    NUnit.Framework.Assert.AreEqual(truncateRange.GetUpper(), Constants.Megabytes * 1);
    // block-size range: [1MB, 2MB]
    Range<long> blockRange = config.GetBlockSize();
    NUnit.Framework.Assert.AreEqual(blockRange.GetLower(), Constants.Megabytes * 1);
    NUnit.Framework.Assert.AreEqual(blockRange.GetUpper(), Constants.Megabytes * 2);
    string resultFile = config.GetResultFile();
    NUnit.Framework.Assert.AreEqual(resultFile, GetResultFile().ToString());
    // duration was given as 10 seconds
    int durationMs = config.GetDurationMilliseconds();
    NUnit.Framework.Assert.AreEqual(durationMs, 10 * 1000);
}
/// <summary>
/// Sets up the operation using the given configuration by setting up the
/// number of operations to perform (and how many are left) and setting up the
/// operation objects to be used throughout selection.
/// </summary>
/// <param name="cfg">ConfigExtractor.</param>
private void ConfigureOperations(ConfigExtractor cfg)
{
    operations = new SortedDictionary<Constants.OperationType, WeightSelector.OperationInfo>();
    IDictionary<Constants.OperationType, OperationData> opinfo = cfg.GetOperations();
    int totalAm = cfg.GetOpCount();
    int opsLeft = totalAm;
    NumberFormat formatter = Formatter.GetPercentFormatter();
    foreach (Constants.OperationType type in opinfo.Keys)
    {
        OperationData opData = opinfo[type];
        WeightSelector.OperationInfo info = new WeightSelector.OperationInfo();
        info.distribution = opData.GetDistribution();
        // integer share of the total operation count allocated to this type
        int amLeft = DetermineHowMany(totalAm, opData, type);
        opsLeft -= amLeft;
        Log.Info(type.ToString() + " has " + amLeft + " initial operations out of " + totalAm
             + " for its ratio " + formatter.Format(opData.GetPercent()));
        info.amountLeft = amLeft;
        Operation op = factory.GetOperation(type);
        // wrap the operation in an observer so the remaining amount is
        // decremented when the operation finishes
        if (op != null)
        {
            ObserveableOp.Observer fn = new _Observer_138(this, type);
            info.operation = new ObserveableOp(op, fn);
            operations[type] = info;
        }
    }
    // integer truncation above can leave a remainder that no type claims
    if (opsLeft > 0)
    {
        Log.Info(opsLeft + " left over operations found (due to inability to support partial operations)"
            );
    }
}
/// <summary>Verifies that a sleep operation runs and reports success.</summary>
public virtual void TestSleep()
{
    // sleep requires the sleep-enabled argument set (hence true)
    ConfigExtractor config = GetTestConfig(true);
    SleepOp sleepOp = new SleepOp(config, rnd);
    RunOperationOk(config, sleepOp, true);
}
// Builds a selector for the given configuration: the roulette selector picks
// among weighted operations created by the factory. ConfigureOperations runs
// before ConfigureWeights — presumably the weights depend on the configured
// operations, so the order is preserved (TODO confirm).
internal WeightSelector(ConfigExtractor cfg, Random rnd)
{
    selector = new RouletteSelector(rnd);
    factory = new OperationFactory(cfg, rnd);
    ConfigureOperations(cfg);
    ConfigureWeights(cfg);
}
/// <summary>
/// Configures this mapper from the job configuration: extracts the slive
/// options, resolves the file system of the base directory and records the
/// numeric task id of the current attempt.
/// </summary>
public override void Configure(JobConf conf)
{
    // MapReduceBase
    try
    {
        config = new ConfigExtractor(conf);
        ConfigExtractor.DumpOptions(config);
        filesystem = config.GetBaseDirectory().GetFileSystem(conf);
    }
    catch (Exception e)
    {
        Log.Error("Unable to setup slive " + StringUtils.StringifyException(e));
        throw new RuntimeException("Unable to setup slive configuration", e);
    }
    string attemptId = conf.Get(MRJobConfig.TaskAttemptId);
    if (attemptId == null)
    {
        // So that branch-1/0.20 can run this same code as well
        attemptId = conf.Get("mapred.task.id");
    }
    this.taskId = TaskAttemptID.ForName(attemptId).GetTaskID().GetId();
}
// Anonymous rename operation: pins the source and target paths, forwarding
// configuration and random source to the base RenameOp.
public _RenameOp_403(Path src, Path tgt, ConfigExtractor baseArg1, Random baseArg2)
    : base(baseArg1, baseArg2)
{
    this.tgt = tgt;
    this.src = src;
}
/// <summary>Verifies that a directory creation operation reports success.</summary>
public virtual void TestMkdir()
{
    ConfigExtractor config = GetTestConfig(false);
    Path targetDir = new Path(GetTestDir().GetCanonicalPath());
    MkdirOp mkdir = new _MkdirOp_498(targetDir, config, rnd);
    RunOperationOk(config, mkdir, true);
}
/// <summary>Verifies that a file create operation reports success.</summary>
public virtual void TestCreateOp()
{
    // setup a valid config
    ConfigExtractor config = GetTestConfig(false);
    Path target = new Path(GetTestFile().GetCanonicalPath());
    CreateOp create = new _CreateOp_285(target, config, rnd);
    RunOperationOk(config, create, true);
}
// Base operation state: records the operation's type name, shared
// configuration and random source.
protected internal Operation(string type, ConfigExtractor cfg, Random rnd)
{
    this.type = type;
    this.rnd = rnd;
    this.config = cfg;
    // A dedicated Random seeded from rnd keeps the sequence of produced file
    // names identical even when some operations are unsuccessful.
    this.finder = new PathFinder(cfg, new Random(rnd.Next()));
}
/// <summary>Cleans up the base directory by removing it recursively.</summary>
/// <param name="cfg">ConfigExtractor which has location of base directory</param>
/// <exception cref="System.IO.IOException"/>
private void Cleanup(ConfigExtractor cfg)
{
    Path baseDir = cfg.GetBaseDirectory();
    if (baseDir == null)
    {
        // nothing configured, nothing to remove
        return;
    }
    Log.Info("Attempting to recursively delete " + baseDir);
    FileSystem fs = baseDir.GetFileSystem(cfg.GetConfig());
    fs.Delete(baseDir, true);
}
/// <summary>Verifies that reading a file created beforehand reports success.</summary>
public virtual void TestRead()
{
    ConfigExtractor config = GetTestConfig(false);
    Path target = new Path(GetTestFile().GetCanonicalPath());
    // the file must exist before it can be read
    CreateOp create = new _CreateOp_429(target, config, rnd);
    RunOperationOk(config, create, true);
    ReadOp read = new _ReadOp_435(target, config, rnd);
    RunOperationOk(config, read, true);
}
// gets the config merged with the arguments
/// <summary>
/// Parses the generated test arguments, asserts that parsing did not fall
/// back to help output, merges the parsed options into the base config and
/// wraps the result in a ConfigExtractor.
/// </summary>
/// <exception cref="System.Exception"/>
private ConfigExtractor GetTestConfig(bool sleep)
{
    ArgumentParser parser = new ArgumentParser(GetTestArgs(sleep));
    ArgumentParser.ParsedOutput @out = parser.Parse();
    // FIX: the source contained the mangled token "[email protected]" here;
    // restored to the parsed-output help check (matches the upstream
    // ArgumentParser.ParsedOutput API) — confirm the member name.
    NUnit.Framework.Assert.IsTrue(!@out.ShouldOutputHelp());
    ConfigMerger merge = new ConfigMerger();
    Configuration cfg = merge.GetMerged(@out, GetBaseConfig());
    ConfigExtractor extractor = new ConfigExtractor(cfg);
    return extractor;
}
/// <summary>
/// Runs the whole slive map/reduce flow through ToolRunner and verifies it
/// exits cleanly and produces the configured result file.
/// </summary>
public virtual void TestMRFlow()
{
    ConfigExtractor config = GetTestConfig(false);
    SliveTest sliveTest = new SliveTest(GetBaseConfig());
    int exitCode = ToolRunner.Run(sliveTest, GetTestArgs(false));
    NUnit.Framework.Assert.IsTrue(exitCode == 0);
    // the run should have written its report to the configured result file
    string resultPath = config.GetResultFile();
    FilePath resultFile = new FilePath(resultPath);
    NUnit.Framework.Assert.IsTrue(resultFile.Exists());
}
/// <summary>Verifies that listing a directory created beforehand reports success.</summary>
public virtual void TestList()
{
    // ensure dir made
    ConfigExtractor config = GetTestConfig(false);
    Path targetDir = new Path(GetTestDir().GetCanonicalPath());
    MkdirOp mkdir = new _MkdirOp_455(targetDir, config, rnd);
    RunOperationOk(config, mkdir, true);
    // list it
    ListOp list = new _ListOp_462(targetDir, config, rnd);
    RunOperationOk(config, list, true);
}
/// <summary>Verifies that renaming a file created beforehand reports success.</summary>
public virtual void TestRename()
{
    ConfigExtractor config = GetTestConfig(false);
    Path source = new Path(GetTestFile().GetCanonicalPath());
    Path target = new Path(GetTestRenameFile().GetCanonicalPath());
    // the source must exist before it can be renamed
    CreateOp create = new _CreateOp_397(source, config, rnd);
    RunOperationOk(config, create, true);
    RenameOp rename = new _RenameOp_403(source, target, config, rnd);
    RunOperationOk(config, rename, true);
}
/// <summary>Verifies that deleting a file created beforehand reports success.</summary>
public virtual void TestDelete()
{
    ConfigExtractor config = GetTestConfig(false);
    Path target = new Path(GetTestFile().GetCanonicalPath());
    // the file must exist before it can be deleted
    CreateOp create = new _CreateOp_376(target, config, rnd);
    RunOperationOk(config, create, true);
    // now delete
    DeleteOp delete = new _DeleteOp_383(target, config, rnd);
    RunOperationOk(config, delete, true);
}
/// <summary>
/// Runs a truncate against a freshly created file. The local file system
/// (ChecksumFileSystem) doesn't support truncate, so the truncate result is
/// not verified — only that it runs.
/// </summary>
public virtual void TestTruncateOp()
{
    // setup a valid config
    ConfigExtractor config = GetTestConfig(false);
    // the file must exist before it can be truncated
    Path target = new Path(GetTestFile().GetCanonicalPath());
    CreateOp create = new _CreateOp_549(target, config, rnd);
    RunOperationOk(config, create, true);
    // local file system (ChecksumFileSystem) currently doesn't support truncate -
    // but we'll leave this test here anyways but can't check the results..
    TruncateOp truncate = new _TruncateOp_557(target, config, rnd);
    RunOperationOk(config, truncate, false);
}
/// <summary>
/// Verifies roulette selection: an empty list yields null, and with a
/// dominant weight the create operation is the one selected.
/// </summary>
public virtual void TestSelector()
{
    ConfigExtractor config = GetTestConfig(false);
    RouletteSelector roulette = new RouletteSelector(rnd);
    IList<OperationWeight> weights = new List<OperationWeight>();
    // nothing to pick from yet
    Operation selected = roulette.Select(weights);
    NUnit.Framework.Assert.IsTrue(selected == null);
    CreateOp create = new CreateOp(config, rnd);
    weights.AddItem(new OperationWeight(create, 1.0d));
    AppendOp append = new AppendOp(config, rnd);
    weights.AddItem(new OperationWeight(append, 0.01d));
    // the create op carries nearly all the weight, so it is the expected pick
    selected = roulette.Select(weights);
    NUnit.Framework.Assert.IsTrue(selected == create);
}
/// <summary>
/// Verifies that read, delete, rename and append against a non-existent path
/// each report a failure/not-found measurement.
/// </summary>
public virtual void TestOpFailures()
{
    ConfigExtractor config = GetTestConfig(false);
    Path missing = new Path(GetImaginaryFile().GetCanonicalPath());
    ReadOp read = new _ReadOp_297(missing, config, rnd);
    RunOperationBad(config, read);
    DeleteOp delete = new _DeleteOp_304(missing, config, rnd);
    RunOperationBad(config, delete);
    RenameOp rename = new _RenameOp_311(missing, config, rnd);
    RunOperationBad(config, rename);
    AppendOp append = new _AppendOp_318(missing, config, rnd);
    RunOperationBad(config, append);
}
/// <summary>Sets up a job conf for the given job using the given config object.</summary>
/// <remarks>
/// Ensures that the correct input format is set, the mapper, partitioner and
/// reducer classes, the output key/value classes, uncompressed text output,
/// and the configured map and reduce task counts.
/// </remarks>
/// <param name="config"/>
/// <returns>JobConf representing the job to be ran</returns>
/// <exception cref="System.IO.IOException"/>
private JobConf GetJob(ConfigExtractor config)
{
    JobConf job = new JobConf(config.GetConfig(), typeof(Org.Apache.Hadoop.FS.Slive.SliveTest));
    // input is synthetic; output lands under the configured output path
    job.SetInputFormat(typeof(DummyInputFormat));
    FileOutputFormat.SetOutputPath(job, config.GetOutputPath());
    job.SetMapperClass(typeof(SliveMapper));
    job.SetPartitionerClass(typeof(SlivePartitioner));
    job.SetReducerClass(typeof(SliveReducer));
    // text key/value pairs, left uncompressed so the report is readable
    job.SetOutputKeyClass(typeof(Text));
    job.SetOutputValueClass(typeof(Text));
    job.SetOutputFormat(typeof(TextOutputFormat));
    TextOutputFormat.SetCompressOutput(job, false);
    job.SetNumReduceTasks(config.GetReducerAmount());
    job.SetNumMapTasks(config.GetMapAmount());
    return job;
}
/// <summary>
/// Runs the given operation and verifies that it produced output including at
/// least one failure or not-found measurement.
/// </summary>
/// <exception cref="System.Exception"/>
private void RunOperationBad(ConfigExtractor cfg, Operation op)
{
    FileSystem fs = FileSystem.Get(cfg.GetConfig());
    IList<OperationOutput> outputs = op.Run(fs);
    NUnit.Framework.Assert.IsTrue(!outputs.IsEmpty());
    bool sawFailure = false;
    foreach (OperationOutput output in outputs)
    {
        var measurement = output.GetMeasurementType();
        // either measurement type counts as the operation having failed
        if (measurement.Equals(ReportWriter.Failures) || measurement.Equals(ReportWriter.NotFound))
        {
            sawFailure = true;
        }
    }
    NUnit.Framework.Assert.IsTrue(sawFailure);
}
/// <summary>
/// Verifies that the path finder only ever yields 10 distinct files and 10
/// distinct directories, no matter how many times it is asked.
/// </summary>
public virtual void TestFinder()
{
    ConfigExtractor config = GetTestConfig(false);
    PathFinder finder = new PathFinder(config, rnd);
    // should only be able to select 10 files
    // attempt for a given amount of iterations
    int maxIterations = 10000;
    ICollection<Path> files = new HashSet<Path>();
    for (int iter = 0; iter < maxIterations; iter++)
    {
        files.AddItem(finder.GetFile());
    }
    NUnit.Framework.Assert.IsTrue(files.Count == 10);
    ICollection<Path> dirs = new HashSet<Path>();
    for (int iter = 0; iter < maxIterations; iter++)
    {
        dirs.AddItem(finder.GetDirectory());
    }
    NUnit.Framework.Assert.IsTrue(dirs.Count == 10);
}
/// <summary>
/// Exhausts the weight selector and verifies that, with uniform weights, one
/// operation of every type was selected before selection ran dry.
/// </summary>
public virtual void TestSelection()
{
    ConfigExtractor config = GetTestConfig(false);
    WeightSelector selector = new WeightSelector(config, rnd);
    // should be 1 of each type - uniform
    int expected = Constants.OperationType.Values().Length;
    ICollection<string> seenTypes = new HashSet<string>();
    FileSystem fs = FileSystem.Get(config.GetConfig());
    // keep selecting until the selector has nothing left to hand out
    for (Operation op = selector.Select(1, 1); op != null; op = selector.Select(1, 1))
    {
        // doesn't matter if they work or not
        op.Run(fs);
        seenTypes.AddItem(op.GetType());
    }
    NUnit.Framework.Assert.AreEqual(seenTypes.Count, expected);
}
// Delete operation: registers with the base Operation under its own type
// name, sharing the configuration and random source.
internal DeleteOp(ConfigExtractor cfg, Random rnd)
    : base(typeof(Org.Apache.Hadoop.FS.Slive.DeleteOp).Name, cfg, rnd)
{
}
/// <summary>
/// Handles the specific task of merging operations from the command line or
/// extractor object into the base configuration provided
/// </summary>
/// <param name="opts">the parsed command line option output</param>
/// <param name="base">the base configuration to merge with</param>
/// <param name="extractor">
/// the access object to fetch operations from if none from the
/// command line
/// </param>
/// <returns>merged configuration object</returns>
/// <exception cref="ConfigException">when verification fails</exception>
/// <exception cref="Org.Apache.Hadoop.FS.Slive.ConfigMerger.ConfigException"/>
private Configuration HandleOperations(ArgumentParser.ParsedOutput opts, Configuration
     @base, ConfigExtractor extractor)
{
    // get the base set to start off with
    IDictionary<Constants.OperationType, OperationData> operations = GetBaseOperations();
    // merge with what is coming from config
    IDictionary<Constants.OperationType, OperationData> cfgOperations = extractor.GetOperations();
    foreach (Constants.OperationType opType in cfgOperations.Keys)
    {
        operations[opType] = cfgOperations[opType];
    }
    // see if any coming in from the command line
    foreach (Constants.OperationType opType_1 in Constants.OperationType.Values())
    {
        string opName = opType_1.LowerName();
        string opVal = opts.GetValue(opName);
        if (opVal != null)
        {
            operations[opType_1] = new OperationData(opVal);
        }
    }
    {
        // remove those with <= zero percent (null percent means "fill later")
        IDictionary<Constants.OperationType, OperationData> cleanedOps =
            new Dictionary<Constants.OperationType, OperationData>();
        foreach (Constants.OperationType opType_2 in operations.Keys)
        {
            OperationData data = operations[opType_2];
            if (data.GetPercent() == null || data.GetPercent() > 0.0d)
            {
                cleanedOps[opType_2] = data;
            }
        }
        operations = cleanedOps;
    }
    if (operations.IsEmpty())
    {
        throw new ConfigMerger.ConfigException("No operations provided!");
    }
    // verify and adjust: sum the explicit percentages and count the ops
    // that still need one assigned
    double currPct = 0;
    int needFill = 0;
    foreach (Constants.OperationType type in operations.Keys)
    {
        OperationData op = operations[type];
        if (op.GetPercent() != null)
        {
            currPct += op.GetPercent();
        }
        else
        {
            needFill++;
        }
    }
    if (currPct > 1)
    {
        throw new ConfigMerger.ConfigException("Unable to have accumlative percent greater than 100%"
            );
    }
    if (needFill > 0 && currPct < 1)
    {
        // spread the remaining percentage evenly over the unfilled operations
        double leftOver = 1.0 - currPct;
        IDictionary<Constants.OperationType, OperationData> mpcp =
            new Dictionary<Constants.OperationType, OperationData>();
        foreach (Constants.OperationType type_1 in operations.Keys)
        {
            OperationData op = operations[type_1];
            if (op.GetPercent() == null)
            {
                op = new OperationData(op.GetDistribution(), (leftOver / needFill));
            }
            mpcp[type_1] = op;
        }
        operations = mpcp;
    }
    else
    {
        if (needFill == 0 && currPct < 1)
        {
            // redistribute the shortfall evenly over all operations
            double leftOver = 1.0 - currPct;
            IDictionary<Constants.OperationType, OperationData> mpcp =
                new Dictionary<Constants.OperationType, OperationData>();
            double each = leftOver / operations.Keys.Count;
            foreach (Constants.OperationType t in operations.Keys)
            {
                OperationData op = operations[t];
                op = new OperationData(op.GetDistribution(), (op.GetPercent() + each));
                mpcp[t] = op;
            }
            operations = mpcp;
        }
        else
        {
            if (needFill > 0 && currPct >= 1)
            {
                throw new ConfigMerger.ConfigException(needFill + " unfilled operations but no percentage left to fill with"
                    );
            }
        }
    }
    // save into base
    foreach (Constants.OperationType opType_3 in operations.Keys)
    {
        string opName = opType_3.LowerName();
        OperationData opData = operations[opType_3];
        string distr = opData.GetDistribution().LowerName();
        // FIX: parenthesize before calling ToString(); member access binds
        // tighter than '*', so the original `GetPercent() * 100.0d.ToString()`
        // applied ToString() to the literal 100.0d instead of the product.
        string ratio = (opData.GetPercent() * 100.0d).ToString();
        @base.Set(string.Format(Constants.Op, opName), opData.ToString());
        @base.Set(string.Format(Constants.OpDistr, opName), distr);
        @base.Set(string.Format(Constants.OpPercent, opName), ratio);
    }
    return @base;
}
/// <summary>
/// Handles merging all options and verifying from the given command line
/// output and the given base configuration and returns the merged
/// configuration
/// </summary>
/// <param name="opts">the parsed command line option output</param>
/// <param name="base">the base configuration to merge with</param>
/// <returns>the merged configuration</returns>
/// <exception cref="ConfigException"/>
/// <exception cref="Org.Apache.Hadoop.FS.Slive.ConfigMerger.ConfigException"/>
private Configuration HandleOptions(ArgumentParser.ParsedOutput opts, Configuration
     @base)
{
    // ensure variables are overwritten and verified
    ConfigExtractor extractor = new ConfigExtractor(@base);
    {
        // overwrite the map amount and check to ensure > 0
        // FIX: the optional numeric results throughout this method were typed
        // as non-nullable int/long yet assigned and compared to null; they are
        // nullable value types (int?/long?) so the null checks are valid.
        int? mapAmount = null;
        try
        {
            mapAmount = extractor.GetMapAmount(opts.GetValue(ConfigOption.Maps.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging map amount", e);
        }
        if (mapAmount != null)
        {
            if (mapAmount <= 0)
            {
                throw new ConfigMerger.ConfigException("Map amount can not be less than or equal to zero");
            }
            @base.Set(ConfigOption.Maps.GetCfgOption(), mapAmount.ToString());
        }
    }
    {
        // overwrite the reducer amount and check to ensure > 0
        int? reduceAmount = null;
        try
        {
            // FIX: was extractor.GetMapAmount(...) — copy/paste bug; the reduces
            // option must go through the reducer accessor. NOTE(review): assumes
            // ConfigExtractor provides GetReducerAmount(string) mirroring
            // GetMapAmount(string) — confirm.
            reduceAmount = extractor.GetReducerAmount(opts.GetValue(ConfigOption.Reduces.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging reducer amount", e);
        }
        if (reduceAmount != null)
        {
            if (reduceAmount <= 0)
            {
                throw new ConfigMerger.ConfigException("Reducer amount can not be less than or equal to zero");
            }
            @base.Set(ConfigOption.Reduces.GetCfgOption(), reduceAmount.ToString());
        }
    }
    {
        // overwrite the duration amount and ensure > 0
        int? duration = null;
        try
        {
            duration = extractor.GetDuration(opts.GetValue(ConfigOption.Duration.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging duration", e);
        }
        if (duration != null)
        {
            if (duration <= 0)
            {
                throw new ConfigMerger.ConfigException("Duration can not be less than or equal to zero");
            }
            @base.Set(ConfigOption.Duration.GetCfgOption(), duration.ToString());
        }
    }
    {
        // overwrite the operation amount and ensure > 0
        int? operationAmount = null;
        try
        {
            operationAmount = extractor.GetOpCount(opts.GetValue(ConfigOption.Ops.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging operation amount", e);
        }
        if (operationAmount != null)
        {
            if (operationAmount <= 0)
            {
                throw new ConfigMerger.ConfigException("Operation amount can not be less than or equal to zero");
            }
            @base.Set(ConfigOption.Ops.GetCfgOption(), operationAmount.ToString());
        }
    }
    {
        // overwrite the exit on error setting
        try
        {
            bool exitOnError = extractor.ShouldExitOnFirstError(opts.GetValue(ConfigOption.ExitOnError.GetOpt()));
            @base.SetBoolean(ConfigOption.ExitOnError.GetCfgOption(), exitOnError);
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging exit on error value", e);
        }
    }
    {
        // overwrite the truncate wait setting
        try
        {
            bool waitOnTruncate = extractor.ShouldWaitOnTruncate(opts.GetValue(ConfigOption.TruncateWait.GetOpt()));
            @base.SetBoolean(ConfigOption.TruncateWait.GetCfgOption(), waitOnTruncate);
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging wait on truncate value", e);
        }
    }
    {
        // verify and set file limit and ensure > 0
        int? fileAm = null;
        try
        {
            fileAm = extractor.GetTotalFiles(opts.GetValue(ConfigOption.Files.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging total file limit amount", e);
        }
        if (fileAm != null)
        {
            if (fileAm <= 0)
            {
                throw new ConfigMerger.ConfigException("File amount can not be less than or equal to zero");
            }
            @base.Set(ConfigOption.Files.GetCfgOption(), fileAm.ToString());
        }
    }
    {
        // set the grid queue to run on
        try
        {
            string qname = extractor.GetQueueName(opts.GetValue(ConfigOption.QueueName.GetOpt()));
            if (qname != null)
            {
                @base.Set(ConfigOption.QueueName.GetCfgOption(), qname);
            }
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging queue name", e);
        }
    }
    {
        // verify and set the directory limit and ensure > 0
        int? directoryLimit = null;
        try
        {
            directoryLimit = extractor.GetDirSize(opts.GetValue(ConfigOption.DirSize.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging directory file limit", e);
        }
        if (directoryLimit != null)
        {
            if (directoryLimit <= 0)
            {
                throw new ConfigMerger.ConfigException("Directory file limit can not be less than or equal to zero");
            }
            @base.Set(ConfigOption.DirSize.GetCfgOption(), directoryLimit.ToString());
        }
    }
    {
        // set the base directory
        Path basedir = null;
        try
        {
            basedir = extractor.GetBaseDirectory(opts.GetValue(ConfigOption.BaseDir.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging base directory", e);
        }
        if (basedir != null)
        {
            // always ensure in slive dir
            basedir = new Path(basedir, Constants.BaseDir);
            @base.Set(ConfigOption.BaseDir.GetCfgOption(), basedir.ToString());
        }
    }
    {
        // set the result file
        // FIX: this block appeared twice verbatim; the duplicate (which just
        // re-set the same key to the same value) has been removed.
        string fn = null;
        try
        {
            fn = extractor.GetResultFile(opts.GetValue(ConfigOption.ResultFile.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging result file", e);
        }
        if (fn != null)
        {
            @base.Set(ConfigOption.ResultFile.GetCfgOption(), fn);
        }
    }
    {
        // set the operations
        try
        {
            @base = HandleOperations(opts, @base, extractor);
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging operations", e);
        }
    }
    {
        // set the replication amount range
        Range<short> replicationAm = null;
        try
        {
            replicationAm = extractor.GetReplication(opts.GetValue(ConfigOption.ReplicationAm.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging replication amount range", e);
        }
        if (replicationAm != null)
        {
            int minRepl = @base.GetInt(Constants.MinReplication, 1);
            if (replicationAm.GetLower() < minRepl)
            {
                throw new ConfigMerger.ConfigException("Replication amount minimum is less than property configured minimum "
                     + minRepl);
            }
            if (replicationAm.GetLower() > replicationAm.GetUpper())
            {
                throw new ConfigMerger.ConfigException("Replication amount minimum is greater than its maximum");
            }
            if (replicationAm.GetLower() <= 0)
            {
                throw new ConfigMerger.ConfigException("Replication amount minimum must be greater than zero");
            }
            @base.Set(ConfigOption.ReplicationAm.GetCfgOption(), replicationAm.ToString());
        }
    }
    {
        // set the sleep range
        Range<long> sleepRange = null;
        try
        {
            sleepRange = extractor.GetSleepRange(opts.GetValue(ConfigOption.SleepTime.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging sleep size range", e);
        }
        if (sleepRange != null)
        {
            if (sleepRange.GetLower() > sleepRange.GetUpper())
            {
                throw new ConfigMerger.ConfigException("Sleep range minimum is greater than its maximum");
            }
            if (sleepRange.GetLower() <= 0)
            {
                throw new ConfigMerger.ConfigException("Sleep range minimum must be greater than zero");
            }
            @base.Set(ConfigOption.SleepTime.GetCfgOption(), sleepRange.ToString());
        }
    }
    {
        // set the packet size if given
        string pSize = opts.GetValue(ConfigOption.PacketSize.GetOpt());
        if (pSize == null)
        {
            pSize = ConfigOption.PacketSize.GetDefault();
        }
        if (pSize != null)
        {
            try
            {
                long packetSize = StringUtils.TraditionalBinaryPrefix.String2long(pSize);
                @base.Set(ConfigOption.PacketSize.GetCfgOption(), packetSize.ToString());
            }
            catch (Exception e)
            {
                throw new ConfigMerger.ConfigException("Error extracting & merging write packet size", e);
            }
        }
    }
    {
        // set the block size range
        Range<long> blockSize = null;
        try
        {
            blockSize = extractor.GetBlockSize(opts.GetValue(ConfigOption.BlockSize.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging block size range", e);
        }
        if (blockSize != null)
        {
            if (blockSize.GetLower() > blockSize.GetUpper())
            {
                throw new ConfigMerger.ConfigException("Block size minimum is greater than its maximum");
            }
            if (blockSize.GetLower() <= 0)
            {
                throw new ConfigMerger.ConfigException("Block size minimum must be greater than zero");
            }
            // ensure block size is a multiple of BYTES_PER_CHECKSUM
            // if a value is set in the configuration
            // FIX: declared nullable so the null check is valid (was plain long)
            long? bytesPerChecksum = extractor.GetByteCheckSum();
            if (bytesPerChecksum != null)
            {
                if ((blockSize.GetLower() % bytesPerChecksum.Value) != 0)
                {
                    throw new ConfigMerger.ConfigException("Blocksize lower bound must be a multiple of "
                         + bytesPerChecksum);
                }
                if ((blockSize.GetUpper() % bytesPerChecksum.Value) != 0)
                {
                    throw new ConfigMerger.ConfigException("Blocksize upper bound must be a multiple of "
                         + bytesPerChecksum);
                }
            }
            @base.Set(ConfigOption.BlockSize.GetCfgOption(), blockSize.ToString());
        }
    }
    {
        // set the read size range
        Range<long> readSize = null;
        try
        {
            readSize = extractor.GetReadSize(opts.GetValue(ConfigOption.ReadSize.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging read size range", e);
        }
        if (readSize != null)
        {
            if (readSize.GetLower() > readSize.GetUpper())
            {
                throw new ConfigMerger.ConfigException("Read size minimum is greater than its maximum");
            }
            if (readSize.GetLower() < 0)
            {
                throw new ConfigMerger.ConfigException("Read size minimum must be greater than or equal to zero");
            }
            @base.Set(ConfigOption.ReadSize.GetCfgOption(), readSize.ToString());
        }
    }
    {
        // set the write size range
        Range<long> writeSize = null;
        try
        {
            writeSize = extractor.GetWriteSize(opts.GetValue(ConfigOption.WriteSize.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging write size range", e);
        }
        if (writeSize != null)
        {
            if (writeSize.GetLower() > writeSize.GetUpper())
            {
                throw new ConfigMerger.ConfigException("Write size minimum is greater than its maximum");
            }
            if (writeSize.GetLower() < 0)
            {
                throw new ConfigMerger.ConfigException("Write size minimum must be greater than or equal to zero");
            }
            @base.Set(ConfigOption.WriteSize.GetCfgOption(), writeSize.ToString());
        }
    }
    {
        // set the append size range
        Range<long> appendSize = null;
        try
        {
            appendSize = extractor.GetAppendSize(opts.GetValue(ConfigOption.AppendSize.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging append size range", e);
        }
        if (appendSize != null)
        {
            if (appendSize.GetLower() > appendSize.GetUpper())
            {
                throw new ConfigMerger.ConfigException("Append size minimum is greater than its maximum");
            }
            if (appendSize.GetLower() < 0)
            {
                throw new ConfigMerger.ConfigException("Append size minimum must be greater than or equal to zero");
            }
            @base.Set(ConfigOption.AppendSize.GetCfgOption(), appendSize.ToString());
        }
    }
    {
        // set the truncate size range
        Range<long> truncateSize = null;
        try
        {
            truncateSize = extractor.GetTruncateSize(opts.GetValue(ConfigOption.TruncateSize.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging truncate size range", e);
        }
        if (truncateSize != null)
        {
            if (truncateSize.GetLower() > truncateSize.GetUpper())
            {
                throw new ConfigMerger.ConfigException("Truncate size minimum is greater than its maximum");
            }
            if (truncateSize.GetLower() < 0)
            {
                throw new ConfigMerger.ConfigException("Truncate size minimum must be greater than or equal to zero");
            }
            @base.Set(ConfigOption.TruncateSize.GetCfgOption(), truncateSize.ToString());
        }
    }
    {
        // set the seed
        long? seed = null;
        try
        {
            seed = extractor.GetRandomSeed(opts.GetValue(ConfigOption.RandomSeed.GetOpt()));
        }
        catch (Exception e)
        {
            throw new ConfigMerger.ConfigException("Error extracting & merging random number seed", e);
        }
        if (seed != null)
        {
            @base.Set(ConfigOption.RandomSeed.GetCfgOption(), seed.ToString());
        }
    }
    return @base;
}
// Anonymous truncate operation: pins the target path, forwarding
// configuration and random source to the base TruncateOp.
public _TruncateOp_557(Path fn, ConfigExtractor baseArg1, Random baseArg2)
    : base(baseArg1, baseArg2)
{
    this.fn = fn;
}
// Anonymous create operation: pins the target path, forwarding
// configuration and random source to the base CreateOp.
public _CreateOp_549(Path fn, ConfigExtractor baseArg1, Random baseArg2)
    : base(baseArg1, baseArg2)
{
    this.fn = fn;
}
// Anonymous append operation: pins the target path, forwarding
// configuration and random source to the base AppendOp.
public _AppendOp_535(Path fn, ConfigExtractor baseArg1, Random baseArg2)
    : base(baseArg1, baseArg2)
{
    this.fn = fn;
}