protected ContentItem(ComponentData content, SessionAwareCoreServiceClient client) { Content = content; Client = client; ReadOptions = new ReadOptions(); ContentManager = new ContentManager(Client); }
public IHttpActionResult NewUser(UserInfoModel data) { if (string.IsNullOrWhiteSpace(data.Name)) { return BadRequest("Name is required."); } try { var defaultReadOptions = new ReadOptions(); var user = (UserData)Client.GetDefaultData(ItemType.User, null, defaultReadOptions); user.Title = data.Name; user.Description = data.Name; if (!string.IsNullOrWhiteSpace(data.Description)) { user.Description = data.Description; } return Ok(Client.Create(user, defaultReadOptions).Id); } catch (Exception ex) { throw new HttpResponseException(Request.CreateErrorResponse(HttpStatusCode.InternalServerError, ex.Message)); } }
protected ContentItem(TcmUri itemId, SessionAwareCoreServiceClient client) { ReadOptions = new ReadOptions(); Client = client; Content = (ComponentData) client.Read(itemId, ReadOptions); ContentManager = new ContentManager(Client); }
public TwoLevelIterator( Iterator index_iter, BlockFunctionDelegate block_function, object arg, ReadOptions options) { block_function_ = block_function; arg_ = arg; options_ = options; index_iter_ = new IteratorWrapper(index_iter); data_iter_ = null; }
public void TestReadFromSnapshotOfDatabase() { using (var db = new DB(new Options { CreateIfMissing = true }, "test1.db")) { db.Put(new WriteOptions(), Slice.FromString("Foo"), Slice.FromString("Bar")); using (var snapshot = db.GetSnapshot()) { var opts = new ReadOptions{ Snapshot = snapshot }; Assert.IsNotNull(db.Get(opts, Slice.FromString("Foo"))); Assert.IsTrue(db.Delete(new WriteOptions(), Slice.FromString("Foo"))); Assert.IsNotNull(db.Get(opts, Slice.FromString("Foo"))); } } }
public Feed Get(Guid id, ReadOptions readOptions = ReadOptions.Unread) { var feed = _context.Feeds.Find(id); if (feed == null) { return null; } _context.Entry(feed).Reference(f => f.Folder).Load(); _context.Entry(feed) .Collection(f => f.Items) .Query() // TODO: should respect readOptions .Where(i => i.ReadDateTime == null) .Load(); feed.Items = feed.Items.OrderByDescending(i => i.PublishedDateTime).ToList(); return feed; }
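A minimal sketch of how the `// TODO` above might be resolved, assuming `ReadOptions` here is an enum whose members include `Unread` (shown in the signature); treating any other value as "load everything" is an assumption:

```csharp
// Hypothetical fix for the TODO: only filter out read items when unread items were requested.
var itemsQuery = _context.Entry(feed)
    .Collection(f => f.Items)
    .Query();
if (readOptions == ReadOptions.Unread)
{
    itemsQuery = itemsQuery.Where(i => i.ReadDateTime == null); // unread = never marked read
}
itemsQuery.Load();
```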
public ContentManager(SessionAwareCoreServiceClient client) { _client = client; _readOptions = new ReadOptions(); }
public SchemaData GetSchema(string tcmUri) { ReadOptions readOptions = new ReadOptions { LoadFlags = LoadFlags.None }; return (SchemaData)Client.Read(tcmUri, readOptions); }
/// <summary> /// The constructor reads the csv file into this object. /// </summary> /// <param name="fileName">Full path of the csv file.</param> /// <param name="options">Read options holding the column delimiter and the decimal separator.</param> /// <param name="log">Log used to write messages to a log file; may be null.</param> public CsvFile(string fileName, ReadOptions options, Log log) { bool writeLogFile = (log != null); if (File.Exists(fileName)) { _fileName = Path.GetFullPath(fileName); using (TextReader reader = new StreamReader(fileName)) { string sLine = reader.ReadLine(); if (null == sLine) throw new ArgumentException(string.Format("\"{0}\" is empty, nothing to parse here.", fileName)); List<string> map = new List<string>(); //skip comments while (!string.IsNullOrEmpty(sLine) && sLine.StartsWith("#", StringComparison.OrdinalIgnoreCase)) sLine = reader.ReadLine(); Regex reg = new Regex(string.Format(CultureInfo.CurrentCulture, "{0}(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))", options.Delimiter)); //read the columns from the first line string[] values = reg.Split(sLine); //one element means the line has not been parsed correctly if (null == values || values.Length == 1) throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "The file {0} could not be parsed. Maybe the wrong delimiter is set? It has been set to \"{1}\".", fileName, options.Delimiter)); foreach (string sCol in values) if (!string.IsNullOrEmpty(sCol)) { string sTemp = sCol.Trim(' ', '"', '\t', '\''); if (sTemp != "t" && sTemp != "time" && sTemp != "Time")//Skip time values { try { _values.Add(sTemp, new List<double>()); map.Add(sTemp); } catch (ArgumentException) { throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Error parsing the csv file \"{0}\". The result {1} is already in the list (maybe you set no or a wrong delimiter for the parser? I used \"{2}\").", fileName, sTemp, options.Delimiter)); } } } CheckHeaderForNumbers(log, writeLogFile, map); //read the rest of the csv file while ((sLine = reader.ReadLine()) != null) { //Skip comments if (sLine.StartsWith("#", StringComparison.OrdinalIgnoreCase)) continue; //values = reg.Split(sLine); //splitting using regular expressions is slow IEnumerable<string> dataValues; if (options.Delimiter.Equals(options.Separator)) dataValues = Tokenize(sLine, options.Delimiter); //use custom tokenizer for improved performance else dataValues = sLine.Split(options.Delimiter); //use ordinary Split function for simple cases int iCol = 0; NumberFormatInfo provider = new NumberFormatInfo(); provider.NumberDecimalSeparator = options.Separator.ToString(); //read values to the dictionary foreach (string sCol in dataValues) { double dValue; if (!Double.TryParse(sCol.Trim('"'), NumberStyles.Any, provider, out dValue)) { if (!string.IsNullOrEmpty(sCol) && writeLogFile) log.WriteLine(LogLevel.Warning, "Could not parse string \"{0}\" as double value, skipping.", sCol); iCol++; continue; } if (iCol == 0) _xAxis.Add(dValue); else try { _values[map[iCol - 1]].Add(dValue); } catch (KeyNotFoundException) { break; } iCol++; } } if (_xAxis.Count <= 1) throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "{0} could not be parsed and might be an invalid csv file.", fileName)); } } else throw new FileNotFoundException("The csv file could not be found.", fileName); }
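A hedged usage sketch for the constructor above; `Delimiter` and `Separator` are the two `ReadOptions` members the constructor body actually reads, and the file path is an illustrative placeholder:

```csharp
// Parse a semicolon-delimited file that uses ',' as its decimal separator.
var readOptions = new ReadOptions { Delimiter = ';', Separator = ',' };
var csv = new CsvFile(@"C:\data\results.csv", readOptions, null); // null log: no log file is written
```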
public static async Task <ReadResult> Read(this EventSocket eventSocket, string uuid, ReadOptions options) { try { // todo: what if applicationresult is null (hang up occurs before the application completes) var result = new ReadResult( await eventSocket.ExecuteApplication(uuid, "read", options.ToString()).ConfigureAwait(false), options.ChannelVariableName); if (!result.Success) { LogFailedApplicationResult(eventSocket, result); } return(result); } catch (TaskCanceledException) { return(new ReadResult(null, null)); } }
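A hypothetical call to this extension; the option names below mirror FreeSWITCH's `read` application parameters and are assumptions about this `ReadOptions` type (only `ChannelVariableName` appears in the source above):

```csharp
// Prompt the caller for a 4-8 digit PIN (sketch; property names are assumed).
var readResult = await eventSocket.Read(uuid, new ReadOptions
{
    MinDigits = 4,
    MaxDigits = 8,
    TimeoutMs = 10000,
    ChannelVariableName = "pin_result"
});
if (readResult.Success) Console.WriteLine(readResult.Digits); // Digits is an assumed member
```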
protected ContentItem(SessionAwareCoreServiceClient client) { Client = client; ReadOptions = new ReadOptions(); ContentManager = new ContentManager(Client); }
public Iterator NewIterator(ColumnFamilyHandle handle, ReadOptions readOptions) => this.db.NewIterator(handle, readOptions);
public bool TryRead(byte[] hash, out Stream stream, ReadOptions options = ReadOptions.None, string encodingName = null) { if (hash == null) throw new ArgumentNullException("hash"); var contentPath = GetContentPath(hash, encodingName); if (!File.Exists(contentPath)) { stream = null; return false; } stream = new FileStream(contentPath, FileMode.Open, FileAccess.Read, FileShare.Read, BufferSize, (FileOptions)options); return true; }
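A minimal usage sketch for `TryRead`, using only the members shown above; `store` stands in for an instance of the containing class and `hash` for a previously computed content hash:

```csharp
// Look up a blob by hash; the out stream must be disposed by the caller.
if (store.TryRead(hash, out Stream content, ReadOptions.None))
{
    using (content)
    {
        content.CopyTo(Stream.Null); // placeholder consumer for the stream
    }
}
```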
public void CreatePropIndex(string type_name, string value_name) { var _write_lock = GetTableWriteLock(type_name); lock (_write_lock) { using (var snapshot = leveld_db.CreateSnapshot()) { var table_info = GetTableInfo(type_name); if (table_info.Columns[value_name] == LinqDbTypes.binary_ || table_info.Columns[value_name] == LinqDbTypes.string_) { throw new LinqDbException("Linqdb: Property type is not supported as memory index: " + value_name); } string snapshot_id = Ldb.GetNewSpnapshotId(); var ro = new ReadOptions().SetSnapshot(snapshot); int total = GetTableRowCount(table_info, ro); List <int> ids = null; if (!existing_indexes.ContainsKey(type_name) || !existing_indexes[type_name].Contains("Id")) { IndexGeneric index = new IndexGeneric() { ColumnName = "Id", ColumnType = LinqDbTypes.int_, TypeName = type_name, Parts = new List <IndexPart>(), IndexType = IndexType.PropertyOnly }; ids = ReadAllIds(table_info, ro, total); int counter = 0; IndexPart cpart = null; foreach (var id in ids) { if (counter % 1000 == 0) { cpart = new IndexPart() { IntValues = new List <int>(1000) }; index.Parts.Add(cpart); } cpart.IntValues.Add(id); counter++; } if (!existing_indexes.ContainsKey(type_name)) { existing_indexes[type_name] = new HashSet <string>() { "Id" }; } else { existing_indexes[type_name].Add("Id"); } indexes[type_name + "|Id|" + snapshot_id] = index; latest_snapshots[type_name + "|Id"] = snapshot_id; snapshots_alive.TryAdd(type_name + "|Id", new List <Tuple <bool, string> >() { new Tuple <bool, string>(false, type_name + "|Id|" + snapshot_id) }); last_cleanup.TryAdd(type_name + "|Id", DateTime.Now); // var skey = MakeSnapshotKey(table_info.TableNumber, table_info.ColumnNumbers["Id"]); leveld_db.Put(skey, Encoding.UTF8.GetBytes(snapshot_id)); } else { ids = new List <int>(total); var skey = MakeSnapshotKey(table_info.TableNumber, table_info.ColumnNumbers["Id"]); var snapid = leveld_db.Get(skey, null, ro); var id_snapshot_id = Encoding.UTF8.GetString(snapid); var index = indexes[type_name + "|Id|" + id_snapshot_id]; for (int i = 0; i < index.Parts.Count(); i++) { ids.AddRange(index.Parts[i].IntValues); } } if (!existing_indexes[type_name].Contains(value_name)) { IndexGeneric index = new IndexGeneric() { ColumnName = value_name, TypeName = type_name, Parts = new List <IndexPart>(), IndexType = IndexType.PropertyOnly }; var latest_snapshot_id = latest_snapshots[type_name + "|Id"]; switch (table_info.Columns[value_name]) { case LinqDbTypes.int_: index.ColumnType = LinqDbTypes.int_; int totalex = 0; if ((ids.Any() ? ids.Max() : 0) < 250000000) { var ivalues = ReadAllIntValuesList(index.ColumnName, table_info, ro, ids.Any() ? ids.Max() : 0, out totalex); if (totalex != ids.Count()) { throw new LinqDbException("Linqdb: column " + index.ColumnName + " has gaps in data. Prior to building an index gaps must be updated with values."); } var id_index = indexes[type_name + "|Id|" + latest_snapshot_id]; for (int j = 0; j < id_index.Parts.Count(); j++) { var part = id_index.Parts[j]; var new_part = new IndexPart() { IntValues = new List <int>(part.IntValues.Count()) }; index.Parts.Add(new_part); for (int k = 0; k < part.IntValues.Count(); k++) { var val = ivalues[(int)part.IntValues[k]]; new_part.IntValues.Add(val); } } } else { var ivalues = ReadAllIntValuesDic(index.ColumnName, table_info, ro, ids.Any() ? ids.Max() : 0, ids.Count(), out totalex); if (totalex != ids.Count()) { throw new LinqDbException("Linqdb: column " + index.ColumnName + " has gaps in data. 
Prior to building an index gaps must be updated with values."); } var id_index = indexes[type_name + "|Id|" + latest_snapshot_id]; for (int j = 0; j < id_index.Parts.Count(); j++) { var part = id_index.Parts[j]; var new_part = new IndexPart() { IntValues = new List <int>(part.IntValues.Count()) }; index.Parts.Add(new_part); for (int k = 0; k < part.IntValues.Count(); k++) { var val = ivalues[(int)part.IntValues[k]]; new_part.IntValues.Add(val); } } } break; case LinqDbTypes.double_: case LinqDbTypes.DateTime_: index.ColumnType = LinqDbTypes.double_; int totalexd = 0; if ((ids.Any() ? ids.Max() : 0) < 250000000) { var dvalues = ReadAllDoubleValuesList(index.ColumnName, table_info, ro, ids.Any() ? ids.Max() : 0, out totalexd); if (totalexd != ids.Count()) { throw new LinqDbException("Linqdb: column " + index.ColumnName + " has gaps in data. Prior to building an index gaps must be updated with values."); } var id_index = indexes[type_name + "|Id|" + latest_snapshot_id]; for (int j = 0; j < id_index.Parts.Count(); j++) { var part = id_index.Parts[j]; var new_part = new IndexPart() { DoubleValues = new List <double>(part.IntValues.Count()) }; index.Parts.Add(new_part); for (int k = 0; k < part.IntValues.Count(); k++) { var val = dvalues[(int)part.IntValues[k]]; new_part.DoubleValues.Add(val); } } } else { var dvalues = ReadAllDoubleValuesDic(index.ColumnName, table_info, ro, ids.Any() ? ids.Max() : 0, ids.Count(), out totalexd); if (totalexd != ids.Count()) { throw new LinqDbException("Linqdb: column " + index.ColumnName + " has gaps in data. Prior to building an index gaps must be updated with values."); } var id_index = indexes[type_name + "|Id|" + latest_snapshot_id]; for (int j = 0; j < id_index.Parts.Count(); j++) { var part = id_index.Parts[j]; var new_part = new IndexPart() { DoubleValues = new List <double>(part.IntValues.Count()) }; index.Parts.Add(new_part); for (int k = 0; k < part.IntValues.Count(); k++) { var val = dvalues[(int)part.IntValues[k]]; new_part.DoubleValues.Add(val); } } } break; } existing_indexes[type_name].Add(value_name); indexes[type_name + "|" + value_name + "|" + snapshot_id] = index; latest_snapshots[type_name + "|" + value_name] = snapshot_id; snapshots_alive.TryAdd(type_name + "|" + value_name, new List <Tuple <bool, string> >() { new Tuple <bool, string>(false, type_name + "|" + value_name + "|" + snapshot_id) }); last_cleanup.TryAdd(type_name + "|" + value_name, DateTime.Now); // var skey = MakeSnapshotKey(table_info.TableNumber, table_info.ColumnNumbers[value_name]); leveld_db.Put(skey, Encoding.UTF8.GetBytes(snapshot_id)); } else //could be group index, we need property one { var latest_snapshot_id = latest_snapshots[type_name + "|" + value_name]; var index = indexes[type_name + "|" + value_name + "|" + latest_snapshot_id]; if (index.IndexType == IndexType.GroupOnly) { var rdic = new Dictionary <int, int>(index.GroupListMapping.Count()); foreach (var d in index.GroupListMapping) { rdic[d.Value] = d.Key; } for (int i = 0; i < index.Parts.Count(); i++) { int icount = index.Parts[i].GroupValues.Count(); var gv = index.Parts[i].GroupValues; index.Parts[i].IntValues = new List <int>(icount); var iv = index.Parts[i].IntValues; for (int j = 0; j < icount; j++) { iv.Add(rdic[gv[j]]); } } index.IndexType = IndexType.Both; } } } } GC.Collect(); GC.WaitForPendingFinalizers(); }
public static void Main(String[] args) { int startArgs = 0; int i; int codePage = 0; string zipfile = null; string targdir = null; string password = null; List <string> entriesToExtract = new List <String>(); bool extractToConsole = false; ActionDesired action = ActionDesired.Extract; ExtractExistingFileAction behaviorForExistingFile = ExtractExistingFileAction.DoNotOverwrite; bool wantQuiet = false; bool wantFlatten = false; System.IO.Stream bitbucket = System.IO.Stream.Null; System.IO.Stream outstream = null; // because the comments and filenames on zip entries may be UTF-8 //System.Console.OutputEncoding = new System.Text.UTF8Encoding(); if (args.Length == 0) { Usage(); } if (args[0] == "-") { extractToConsole = true; outstream = Console.OpenStandardOutput(); startArgs = 1; } for (i = startArgs; i < args.Length; i++) { switch (args[i]) { case "-cp": i++; if (args.Length <= i) { Usage(); } if (codePage != 0) { Usage(); } System.Int32.TryParse(args[i], out codePage); break; case "-d": i++; if (args.Length <= i) { Usage(); } if (targdir != null) { Usage(); } if (extractToConsole) { Usage(); } if (action != ActionDesired.Extract) { Usage(); } targdir = args[i]; break; case "-f": wantFlatten = true; if (action != ActionDesired.Extract) { Usage(); } break; case "-i": if (password != null) { Usage(); } if (targdir != null) { Usage(); } if (wantQuiet) { Usage(); } if (entriesToExtract.Count > 0) { Usage(); } action = ActionDesired.Info; break; case "-l": if (password != null) { Usage(); } if (targdir != null) { Usage(); } if (wantQuiet) { Usage(); } if (entriesToExtract.Count > 0) { Usage(); } if (behaviorForExistingFile == ExtractExistingFileAction.OverwriteSilently) { Usage(); } action = ActionDesired.List; break; case "-o": behaviorForExistingFile = ExtractExistingFileAction.OverwriteSilently; if (action != ActionDesired.Extract) { Usage(); } break; case "-r": if (wantFlatten == true) { Usage(); } if (targdir != null) { Usage(); } if (action == ActionDesired.Test) { Usage(); } action = ActionDesired.Repair; break; case "-p": i++; if (args.Length <= i) { Usage(); } if (password != null) { Usage(); } password = args[i]; break; case "-q": if (action == ActionDesired.List) { Usage(); } wantQuiet = true; break; case "-t": action = ActionDesired.Test; if (targdir != null) { Usage(); } //if (wantQuiet) Usage(); if (entriesToExtract.Count > 0) { Usage(); } break; case "-?": Usage(); break; default: // positional args if (zipfile == null) { zipfile = args[i]; } else if (action != ActionDesired.Extract) { Usage(); } else { entriesToExtract.Add(args[i]); } break; } } if (zipfile == null) { Console.WriteLine("unzip: No zipfile specified.\n"); Usage(); } if (!System.IO.File.Exists(zipfile)) { Console.WriteLine("unzip: That zip file does not exist!\n"); Usage(); } if (targdir == null) { targdir = "."; } try { if (action == ActionDesired.Repair) { ZipFile.FixZipDirectory(zipfile); } else { var options = new ReadOptions { Encoding = (codePage != 0) ? 
System.Text.Encoding.GetEncoding(codePage) : null }; using (ZipFile zip = ZipFile.Read(zipfile, options)) { if (entriesToExtract.Count > 0) { // extract specified entries foreach (var entryToExtract in entriesToExtract) { // find the entry ZipEntry e = zip[entryToExtract]; if (e == null) { System.Console.WriteLine(" entry ({0}) does not exist in the zip archive.", entryToExtract); } else { if (wantFlatten) { e.FileName = System.IO.Path.GetFileName(e.FileName); } if (password == null) { if (e.UsesEncryption) { System.Console.WriteLine(" That entry ({0}) requires a password to extract.", entryToExtract); } else if (extractToConsole) { e.Extract(outstream); } else { e.Extract(targdir, behaviorForExistingFile); } } else { if (extractToConsole) { e.ExtractWithPassword(outstream, password); } else { e.ExtractWithPassword(targdir, behaviorForExistingFile, password); } } } } } else if (action == ActionDesired.Info) { System.Console.WriteLine("{0}", zip.Info); } else { // extract all, or list, or test // The logic here does almost the same thing as the ExtractAll() method // on the ZipFile class. But in this case we *could* have control over // it, for example only extract files of a certain type, or whose names // matched a certain pattern, or whose lastmodified times fit a certain // condition, or use a different password for each entry, etc. We can // also display status for each entry, as here. Int64 totalUncompressedSize = 0; bool header = true; foreach (ZipEntry e in zip.EntriesSorted) { if (!wantQuiet) { if (header) { System.Console.WriteLine("Zipfile: {0}", zip.Name); if ((zip.Comment != null) && (zip.Comment != "")) { System.Console.WriteLine("Comment: {0}", zip.Comment); } System.Console.WriteLine("\n{1,-22} {2,10} {3,5} {4,10} {5,3} {6,8} {0}", "Filename", "Modified", "Size", "Ratio", "Packed", "pw?", "CRC"); System.Console.WriteLine(new System.String('-', 80)); header = false; } totalUncompressedSize += e.UncompressedSize; System.Console.WriteLine("{1,-22} {2,10} {3,5:F0}% {4,10} {5,3} {6:X8} {0}", e.FileName, e.LastModified.ToString("yyyy-MM-dd HH:mm:ss"), e.UncompressedSize, e.CompressionRatio, e.CompressedSize, (e.UsesEncryption) ? "Y" : "N", e.Crc); if ((e.Comment != null) && (e.Comment != "")) { System.Console.WriteLine(" Comment: {0}", e.Comment); } } if (action == ActionDesired.Extract) { if (e.UsesEncryption) { if (password == null) { System.Console.WriteLine("unzip: {0}: Cannot extract this entry without a password.", e.FileName); } else { if (wantFlatten) { e.FileName = System.IO.Path.GetFileName(e.FileName); } if (extractToConsole) { e.ExtractWithPassword(outstream, password); } else { e.ExtractWithPassword(targdir, behaviorForExistingFile, password); } } } else { if (wantFlatten) { e.FileName = System.IO.Path.GetFileName(e.FileName); } if (extractToConsole) { e.Extract(outstream); } else { e.Extract(targdir, behaviorForExistingFile); } } } else if (action == ActionDesired.Test) { e.ExtractWithPassword(bitbucket, password); } } // foreach if (!wantQuiet) { System.Console.WriteLine(new System.String('-', 80)); System.Console.WriteLine("{1,-22} {2,10} {3,5} {4,10} {5,3} {6,8} {0}", zip.Entries.Count.ToString() + " files", "", totalUncompressedSize, "", "", "", ""); } } // else (extract all) } // end using(), the underlying file is closed. } } catch (System.Exception ex1) { System.Console.Error.WriteLine("exception: " + ex1); } Console.WriteLine(); }
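The comment in the extract-all branch notes that hand-rolling the loop makes selective extraction possible. A short sketch of that idea using only the DotNetZip calls already present in the sample (requires `using System.Linq;`):

```csharp
// Extract only .txt entries, silently overwriting existing files.
using (ZipFile zip = ZipFile.Read(zipfile, options))
{
    foreach (ZipEntry e in zip.Entries.Where(en =>
        en.FileName.EndsWith(".txt", StringComparison.OrdinalIgnoreCase)))
    {
        e.Extract(targdir, ExtractExistingFileAction.OverwriteSilently);
    }
}
```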
static void Main(string[] args) { string temp = Path.GetTempPath(); string path = Environment.ExpandEnvironmentVariables(Path.Combine(temp, "rocksdb_simple_hl_example")); // the Options class contains a set of configurable DB options // that determines the behavior of a database // Why is the syntax, SetXXX(), not very C#-like? See Options for an explanation var options = new DbOptions() .SetCreateIfMissing(true) .EnableStatistics(); using (var db = RocksDb.Open(options, path)) { try { { // With strings string value = db.Get("key"); db.Put("key", "value"); value = db.Get("key"); string iWillBeNull = db.Get("non-existent-key"); db.Remove("key"); } { // With bytes var key = Encoding.UTF8.GetBytes("key"); byte[] value = Encoding.UTF8.GetBytes("value"); db.Put(key, value); value = db.Get(key); byte[] iWillBeNull = db.Get(new byte[] { 0, 1, 2 }); db.Remove(key); db.Put(key, new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 }); } { // With buffers var key = Encoding.UTF8.GetBytes("key"); var buffer = new byte[100]; long length = db.Get(key, buffer, 0, buffer.Length); } { // Removal of non-existent keys db.Remove("I don't exist"); } { // Write batches // With strings using (WriteBatch batch = new WriteBatch() .Put("one", "uno") .Put("two", "deuce") .Put("two", "dos") .Put("three", "tres")) { db.Write(batch); } // With bytes var utf8 = Encoding.UTF8; using (WriteBatch batch = new WriteBatch() .Put(utf8.GetBytes("four"), new byte[] { 4, 4, 4 }) .Put(utf8.GetBytes("five"), new byte[] { 5, 5, 5 })) { db.Write(batch); } } { // Snapshots using (var snapshot = db.CreateSnapshot()) { var before = db.Get("one"); db.Put("one", "1"); var useSnapshot = new ReadOptions() .SetSnapshot(snapshot); // the database value was written Debug.Assert(db.Get("one") == "1"); // but the snapshot still sees the old version var after = db.Get("one", readOptions: useSnapshot); Debug.Assert(after == before); } } var two = db.Get("two"); Debug.Assert(two == "dos"); { // Iterators using (var iterator = db.NewIterator( readOptions: new ReadOptions() .SetIterateUpperBound("t") )) { iterator.Seek("k"); Debug.Assert(iterator.Valid()); Debug.Assert(iterator.StringKey() == "key"); iterator.Next(); Debug.Assert(iterator.Valid()); Debug.Assert(iterator.StringKey() == "one"); Debug.Assert(iterator.StringValue() == "1"); iterator.Next(); Debug.Assert(!iterator.Valid()); } } } catch (RocksDbException) { } } }
public void FunctionalTest() { string temp = Path.GetTempPath(); var testdir = Path.Combine(temp, "functional_test"); var testdb = Path.Combine(testdir, "main"); var testcp = Path.Combine(testdir, "cp"); var path = Environment.ExpandEnvironmentVariables(testdb); var cppath = Environment.ExpandEnvironmentVariables(testcp); if (Directory.Exists(testdir)) { Directory.Delete(testdir, true); } Directory.CreateDirectory(testdir); var options = new DbOptions() .SetCreateIfMissing(true) .EnableStatistics(); // Using standard open using (var db = RocksDb.Open(options, path)) { // With strings string value = db.Get("key"); db.Put("key", "value"); Assert.Equal("value", db.Get("key")); Assert.Null(db.Get("non-existent-key")); db.Remove("key"); Assert.Null(db.Get("key")); // With bytes db.Put(Encoding.UTF8.GetBytes("key"), Encoding.UTF8.GetBytes("value")); Assert.True(BinaryComparer.Default.Equals(Encoding.UTF8.GetBytes("value"), db.Get(Encoding.UTF8.GetBytes("key")))); // non-existent key Assert.Null(db.Get(new byte[] { 0, 1, 2 })); db.Remove(Encoding.UTF8.GetBytes("key")); Assert.Null(db.Get(Encoding.UTF8.GetBytes("key"))); db.Put(Encoding.UTF8.GetBytes("key"), new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 }); // With buffers var buffer = new byte[100]; long length = db.Get(Encoding.UTF8.GetBytes("key"), buffer, 0, buffer.Length); Assert.Equal(8, length); Assert.Equal(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 }, buffer.Take((int)length).ToList()); buffer = new byte[5]; length = db.Get(Encoding.UTF8.GetBytes("key"), buffer, 0, buffer.Length); Assert.Equal(8, length); Assert.Equal(new byte[] { 0, 1, 2, 3, 4 }, buffer.Take((int)Math.Min(buffer.Length, length))); length = db.Get(Encoding.UTF8.GetBytes("bogus"), buffer, 0, buffer.Length); Assert.Equal(-1, length); // Write batches // With strings using (WriteBatch batch = new WriteBatch() .Put("one", "uno") .Put("two", "deuce") .Put("two", "dos") .Put("three", "tres")) { db.Write(batch); } Assert.Equal("uno", db.Get("one")); // With save point using (WriteBatch batch = new WriteBatch()) { batch .Put("hearts", "red") .Put("diamonds", "red"); batch.SetSavePoint(); batch .Put("clubs", "black"); batch.SetSavePoint(); batch .Put("spades", "black"); batch.RollbackToSavePoint(); db.Write(batch); } Assert.Equal("red", db.Get("diamonds")); Assert.Equal("black", db.Get("clubs")); Assert.Null(db.Get("spades")); // Save a checkpoint using (var cp = db.Checkpoint()) { cp.Save(cppath); } // With bytes var utf8 = Encoding.UTF8; using (WriteBatch batch = new WriteBatch() .Put(utf8.GetBytes("four"), new byte[] { 4, 4, 4 }) .Put(utf8.GetBytes("five"), new byte[] { 5, 5, 5 })) { db.Write(batch); } Assert.True(BinaryComparer.Default.Equals(new byte[] { 4, 4, 4 }, db.Get(utf8.GetBytes("four")))); // Snapshots using (var snapshot = db.CreateSnapshot()) { var before = db.Get("one"); db.Put("one", "1"); var useSnapshot = new ReadOptions() .SetSnapshot(snapshot); // the database value was written Assert.Equal("1", db.Get("one")); // but the snapshot still sees the old version var after = db.Get("one", readOptions: useSnapshot); Assert.Equal(before, after); } var two = db.Get("two"); Assert.Equal("dos", two); // Iterators using (var iterator = db.NewIterator( readOptions: new ReadOptions() .SetIterateUpperBound("t") )) { iterator.Seek("k"); Assert.True(iterator.Valid()); Assert.Equal("key", iterator.StringKey()); iterator.Next(); Assert.True(iterator.Valid()); Assert.Equal("one", iterator.StringKey()); Assert.Equal("1", iterator.StringValue()); iterator.Next(); Assert.False(iterator.Valid()); 
} // MultiGet var multiGetResult = db.MultiGet(new[] { "two", "three", "nine" }); Assert.Equal( expected: new[] { new KeyValuePair <string, string>("two", "dos"), new KeyValuePair <string, string>("three", "tres"), new KeyValuePair <string, string>("nine", null) }, actual: multiGetResult ); } // Test reading checkpointed db using (var cpdb = RocksDb.Open(options, cppath)) { Assert.Equal("red", cpdb.Get("diamonds")); Assert.Equal("black", cpdb.Get("clubs")); Assert.Null(cpdb.Get("spades")); // Checkpoint occurred before these changes: Assert.Null(cpdb.Get("four")); } // Test various operations using (var db = RocksDb.Open(options, path)) { // Nulls should be allowed here db.CompactRange((byte[])null, (byte[])null); db.CompactRange((string)null, (string)null); } // Test with column families var optionsCf = new DbOptions() .SetCreateIfMissing(true) .SetCreateMissingColumnFamilies(true); var columnFamilies = new ColumnFamilies { { "reverse", new ColumnFamilyOptions() }, }; using (var db = RocksDb.Open(optionsCf, path, columnFamilies)) { var reverse = db.GetColumnFamily("reverse"); db.Put("one", "uno"); db.Put("two", "dos"); db.Put("three", "tres"); db.Put("uno", "one", cf: reverse); db.Put("dos", "two", cf: reverse); db.Put("tres", "three", cf: reverse); } // Test Cf Delete using (var db = RocksDb.Open(optionsCf, path, columnFamilies)) { var reverse = db.GetColumnFamily("reverse"); db.Put("cuatro", "four", cf: reverse); db.Put("cinco", "five", cf: reverse); Assert.Equal("four", db.Get("cuatro", cf: reverse)); Assert.Equal("five", db.Get("cinco", cf: reverse)); byte[] keyBytes = Encoding.UTF8.GetBytes("cuatro"); db.Remove(keyBytes, reverse); db.Remove("cinco", reverse); Assert.Null(db.Get("cuatro", cf: reverse)); Assert.Null(db.Get("cinco", cf: reverse)); } // Test list { var list = RocksDb.ListColumnFamilies(optionsCf, path); Assert.Equal(new[] { "default", "reverse" }, list.ToArray()); } // Test reopen with column families using (var db = RocksDb.Open(optionsCf, path, columnFamilies)) { var reverse = db.GetColumnFamily("reverse"); Assert.Equal("uno", db.Get("one")); Assert.Equal("one", db.Get("uno", cf: reverse)); Assert.Null(db.Get("uno")); Assert.Null(db.Get("one", cf: reverse)); } // Test dropping and creating column family using (var db = RocksDb.Open(options, path, columnFamilies)) { db.DropColumnFamily("reverse"); var reverse = db.CreateColumnFamily(new ColumnFamilyOptions(), "reverse"); Assert.Null(db.Get("uno", cf: reverse)); db.Put("red", "rouge", cf: reverse); Assert.Equal("rouge", db.Get("red", cf: reverse)); } // Test reopen after drop and create using (var db = RocksDb.Open(options, path, columnFamilies)) { var reverse = db.GetColumnFamily("reverse"); Assert.Null(db.Get("uno", cf: reverse)); Assert.Equal("rouge", db.Get("red", cf: reverse)); } // Test read only using (var db = RocksDb.OpenReadOnly(options, path, columnFamilies, false)) { Assert.Equal("uno", db.Get("one")); } // Test SstFileWriter { using (var writer = new SstFileWriter()) { } var envOpts = new EnvOptions(); var ioOpts = new ColumnFamilyOptions(); using (var sst = new SstFileWriter(envOpts, ioOpts)) { var filename = Path.Combine(temp, "test.sst"); if (File.Exists(filename)) { File.Delete(filename); } sst.Open(filename); sst.Add("four", "quatro"); sst.Add("one", "uno"); sst.Add("two", "dos"); sst.Finish(); using (var db = RocksDb.Open(options, path, columnFamilies)) { Assert.NotEqual("four", db.Get("four")); var ingestOptions = new IngestExternalFileOptions() .SetMoveFiles(true); db.IngestExternalFiles(new string[] 
{ filename }, ingestOptions); Assert.Equal("quatro", db.Get("four")); } } } // test comparator unsafe { var opts = new ColumnFamilyOptions() .SetComparator(new IntegerStringComparator()); var filename = Path.Combine(temp, "test.sst"); if (File.Exists(filename)) { File.Delete(filename); } using (var sst = new SstFileWriter(ioOptions: opts)) { sst.Open(filename); sst.Add("111", "111"); sst.Add("1001", "1001"); // this order is only allowed using an integer comparator sst.Finish(); } } // test write batch with index { var wbwi = new WriteBatchWithIndex(reservedBytes: 1024); wbwi.Put("one", "un"); wbwi.Put("two", "deux"); var oneValueIn = Encoding.UTF8.GetBytes("one"); var oneValueOut = wbwi.Get("one"); Assert.Equal("un", oneValueOut); using (var db = RocksDb.Open(options, path, columnFamilies)) { var oneCombinedOut = wbwi.Get(db, "one"); var threeCombinedOut = wbwi.Get(db, "three"); Assert.Equal("un", oneCombinedOut); Assert.Equal("tres", threeCombinedOut); using (var wbIterator = wbwi.NewIterator(db.NewIterator())) { wbIterator.Seek("o"); Assert.True(wbIterator.Valid()); var itkey = wbIterator.StringKey(); Assert.Equal("one", itkey); var itval = wbIterator.StringValue(); Assert.Equal("un", itval); wbIterator.Next(); Assert.True(wbIterator.Valid()); itkey = wbIterator.StringKey(); Assert.Equal("three", itkey); itval = wbIterator.StringValue(); Assert.Equal("tres", itval); wbIterator.Next(); Assert.True(wbIterator.Valid()); itkey = wbIterator.StringKey(); Assert.Equal("two", itkey); itval = wbIterator.StringValue(); Assert.Equal("deux", itval); wbIterator.Next(); Assert.False(wbIterator.Valid()); } db.Write(wbwi); var oneDbOut = wbwi.Get("one"); Assert.Equal("un", oneDbOut); } } // compact range { using (var db = RocksDb.Open(options, path, columnFamilies)) { db.CompactRange("o", "tw"); } } // Test that GC does not cause access violation on Comparers { if (Directory.Exists("test-av-error")) { Directory.Delete("test-av-error", true); } options = new RocksDbSharp.DbOptions() .SetCreateIfMissing(true) .SetCreateMissingColumnFamilies(true); var sc = new RocksDbSharp.StringComparator(StringComparer.InvariantCultureIgnoreCase); columnFamilies = new RocksDbSharp.ColumnFamilies { { "cf1", new RocksDbSharp.ColumnFamilyOptions() .SetComparator(sc) }, }; GC.Collect(); using (var db = RocksDbSharp.RocksDb.Open(options, "test-av-error", columnFamilies)) { } if (Directory.Exists("test-av-error")) { Directory.Delete("test-av-error", true); } } // Smoke test various options { var dbname = "test-options"; if (Directory.Exists(dbname)) { Directory.Delete(dbname, true); } var optsTest = (DbOptions) new RocksDbSharp.DbOptions() .SetCreateIfMissing(true) .SetCreateMissingColumnFamilies(true) .SetBlockBasedTableFactory(new BlockBasedTableOptions().SetBlockCache(Cache.CreateLru(1024 * 1024))); GC.Collect(); using (var db = RocksDbSharp.RocksDb.Open(optsTest, dbname)) { } if (Directory.Exists(dbname)) { Directory.Delete(dbname, true); } } // Smoke test OpenWithTtl { var dbname = "test-with-ttl"; if (Directory.Exists(dbname)) { Directory.Delete(dbname, true); } var optsTest = (DbOptions) new RocksDbSharp.DbOptions() .SetCreateIfMissing(true) .SetCreateMissingColumnFamilies(true); using (var db = RocksDbSharp.RocksDb.OpenWithTtl(optsTest, dbname, 1)) { } if (Directory.Exists(dbname)) { Directory.Delete(dbname, true); } } // Smoke test MergeOperator { var dbname = "test-merge-operator"; if (Directory.Exists(dbname)) { Directory.Delete(dbname, true); } var optsTest = (DbOptions) new RocksDbSharp.DbOptions() 
.SetCreateIfMissing(true) .SetMergeOperator(MergeOperators.Create( name: "test-merge-operator", partialMerge: (key, keyLength, operandsList, operandsListLength, numOperands, success, newValueLength) => IntPtr.Zero, fullMerge: (key, keyLength, existingValue, existingValueLength, operandsList, operandsListLength, numOperands, success, newValueLength) => IntPtr.Zero, deleteValue: (value, valueLength) => { } )); GC.Collect(); using (var db = RocksDbSharp.RocksDb.Open(optsTest, dbname)) { } if (Directory.Exists(dbname)) { Directory.Delete(dbname, true); } } // Test that GC does not cause access violation on Comparers { var dbname = "test-av-error"; if (Directory.Exists(dbname)) { Directory.Delete(dbname, true); } options = new RocksDbSharp.DbOptions() .SetCreateIfMissing(true) .SetCreateMissingColumnFamilies(true); var sc = new RocksDbSharp.StringComparator(StringComparer.InvariantCultureIgnoreCase); columnFamilies = new RocksDbSharp.ColumnFamilies { { "cf1", new RocksDbSharp.ColumnFamilyOptions() .SetComparator(sc) }, }; GC.Collect(); using (var db = RocksDbSharp.RocksDb.Open(options, dbname, columnFamilies)) { } if (Directory.Exists(dbname)) { Directory.Delete(dbname, true); } } }
public PeekResult Peek(PeekParameters parameters) { try { if (parameters == null) { throw new ArgumentNullException("parameters"); } if (string.IsNullOrWhiteSpace(parameters.ItemUri)) { throw new ArgumentException(Resources.MissingItemUri); } if (Client.IsExistingObject(parameters.ItemUri)) { var readOptions = new ReadOptions { LoadFlags = LoadFlags.Expanded | LoadFlags.WebDavUrls }; var item = Client.Read(parameters.ItemUri, readOptions); switch (GetItemType(item.Id)) { case ItemType.Category: return(CategoryResult.From((CategoryData)item, Client, CurrentUserId)); case ItemType.Component: return(ComponentResult.From((ComponentData)item, CurrentUserId)); case ItemType.ComponentTemplate: return(ComponentTemplateResult.From((ComponentTemplateData)item, Client, CurrentUserId)); case ItemType.Folder: return(FolderResult.From((FolderData)item, CurrentUserId)); case ItemType.Group: return(GroupResult.From((GroupData)item)); case ItemType.Keyword: return(KeywordResult.From((KeywordData)item, CurrentUserId)); case ItemType.MultimediaType: return(MultimediaTypeResult.From((MultimediaTypeData)item)); case ItemType.Page: return(PageResult.From((PageData)item, Client, CurrentUserId)); case ItemType.PageTemplate: return(PageTemplateResult.From((PageTemplateData)item, Client, CurrentUserId)); case ItemType.Publication: return(PublicationResult.From((PublicationData)item, CurrentUserId)); case ItemType.PublicationTarget: return(PublicationTargetResult.From((PublicationTargetData)item)); case ItemType.Schema: return(SchemaResult.From((SchemaData)item, CurrentUserId)); case ItemType.StructureGroup: return(StructureGroupResult.From((StructureGroupData)item, CurrentUserId)); case ItemType.TargetGroup: return(TargetGroupResult.From((TargetGroupData)item, CurrentUserId)); case ItemType.TargetType: return(TargetTypeResult.From((TargetTypeData)item)); case ItemType.TemplateBuildingBlock: return(TemplateBuildingBlockResult.From((TemplateBuildingBlockData)item, Client, CurrentUserId)); case ItemType.User: return(UserResult.From((UserData)item, Client)); case ItemType.VirtualFolder: return(VirtualFolderResult.From((VirtualFolderData)item, CurrentUserId)); } } return(new EmptyResult()); } catch (Exception ex) { throw new HttpResponseException(Request.CreateErrorResponse(HttpStatusCode.InternalServerError, ex.Message)); } }
static WalletIndexer() { string path = Path.GetFullPath($"Index_{Settings.Default.Magic:X8}"); Directory.CreateDirectory(path); db = DB.Open(path, new Options { CreateIfMissing = true }); if (db.TryGet(ReadOptions.Default, SliceBuilder.Begin(DataEntryPrefix.SYS_Version), out Slice value) && Version.TryParse(value.ToString(), out Version version) && version >= Version.Parse("2.5.4")) { ReadOptions options = new ReadOptions { FillCache = false }; foreach (var group in db.Find(options, SliceBuilder.Begin(DataEntryPrefix.IX_Group), (k, v) => new { Height = k.ToUInt32(1), Id = v.ToArray() })) { UInt160[] accounts = db.Get(options, SliceBuilder.Begin(DataEntryPrefix.IX_Accounts).Add(group.Id)).ToArray().AsSerializableArray <UInt160>(); indexes.Add(group.Height, new HashSet <UInt160>(accounts)); foreach (UInt160 account in accounts) { accounts_tracked.Add(account, new HashSet <CoinReference>()); } } foreach (Coin coin in db.Find(options, SliceBuilder.Begin(DataEntryPrefix.ST_Coin), (k, v) => new Coin { Reference = k.ToArray().Skip(1).ToArray().AsSerializable <CoinReference>(), Output = v.ToArray().AsSerializable <TransactionOutput>(), State = (CoinState)v.ToArray()[60] })) { accounts_tracked[coin.Output.ScriptHash].Add(coin.Reference); coins_tracked.Add(coin.Reference, coin); } } else { WriteBatch batch = new WriteBatch(); ReadOptions options = new ReadOptions { FillCache = false }; using (Iterator it = db.NewIterator(options)) { for (it.SeekToFirst(); it.Valid(); it.Next()) { batch.Delete(it.Key()); } } batch.Put(SliceBuilder.Begin(DataEntryPrefix.SYS_Version), Assembly.GetExecutingAssembly().GetName().Version.ToString()); db.Write(WriteOptions.Default, batch); } Thread thread = new Thread(ProcessBlocks) { IsBackground = true, Name = $"{nameof(WalletIndexer)}.{nameof(ProcessBlocks)}" }; thread.Start(); }
/// <summary> /// Compares a (set of) test curve(s) with a (set of) reference curve(s). Calculates only a tube if the test curve data is null. /// </summary> /// <param name="modelName">Name, e.g. the model name.</param> /// <param name="referenceFileName">Full path name of file with data of reference curve.</param> /// <param name="testFileName">Full path name of file with data of test curve.</param> /// <param name="result">Result identifier. Identifies the result in reference and test that shall be compared. Set result = "" to compare all results in the reference file.</param> /// <param name="options">Options for calculation of tube size, chart and saving.</param> /// <param name="readOptions">Options for reading the csv files (column delimiter and decimal separator).</param> /// <returns>List of tube reports.</returns> public List<TubeReport> ValidateCSV(string modelName, string referenceFileName, string testFileName, string result, IOptions options, ReadOptions readOptions) { List<TubeReport> reportList = new List<TubeReport>(); TubeReport report; CsvFile refCsvFile; CsvFile testCsvFile = null; int CalculationFailedCount = 0; int validCount = 0; int invalidCount = 0; bool useGivenResult = !String.IsNullOrWhiteSpace(result); bool testExists = (!String.IsNullOrWhiteSpace(testFileName) && (File.Exists(testFileName))); double[] refX, refY, testX, testY; if (String.IsNullOrWhiteSpace(referenceFileName) || (!File.Exists(referenceFileName))) return reportList; // read curve data, set arrays with x values refCsvFile = new CsvFile(referenceFileName, readOptions, options.Log); refX = refCsvFile.XAxis.ToArray(); if (testExists) { testCsvFile = new CsvFile(testFileName, readOptions, options.Log); testX = testCsvFile.XAxis.ToArray(); } else testX = null; // for each curve in the set of curves from reference file foreach (KeyValuePair<string, List<double>> entry in refCsvFile.Results) { if (!useGivenResult) result = entry.Key; // Validate only if there is a curve with the same key in the set of curves in the test file, or just calculate the tube if !testExists if (testExists && !testCsvFile.Results.ContainsKey(result)) continue; // set arrays with y values refY = refCsvFile.Results[result].ToArray(); if (testExists) testY = testCsvFile.Results[result].ToArray(); else testY = null; report = Validate(modelName, result, refX, refY, testX, testY, options); if (report.Valid == Validity.Valid) validCount++; else if (report.Valid == Validity.Invalid) invalidCount++; if (report.ErrorStep != Step.None) CalculationFailedCount++; reportList.Add(report); if (useGivenResult) break; } if (options.Log != null) { // write in log file options.Log.WriteLine(LogLevel.Done, "-----------------------------------------------------------------------"); options.Log.WriteLine(LogLevel.Done, "Calculation failed: " + CalculationFailedCount); options.Log.WriteLine(LogLevel.Done, "Valid Results: " + validCount); options.Log.WriteLine(LogLevel.Done, "Invalid Results: " + invalidCount); } return reportList; }
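A hypothetical call to `ValidateCSV`; the validator instance, file paths, and the `IOptions` implementation are assumptions for illustration:

```csharp
// Compare every result curve in the reference file against the test file.
List<TubeReport> reports = validator.ValidateCSV(
    "MyModel",
    @"C:\data\reference.csv",
    @"C:\data\test.csv",
    result: "",                                   // empty: compare all results
    options: new DefaultOptions(),                // hypothetical IOptions implementation
    readOptions: new ReadOptions { Delimiter = ';', Separator = '.' });
```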
/// <summary> /// Runs the sample using the specified Event Hubs connection information. /// </summary> /// /// <param name="connectionString">The connection string for the Event Hubs namespace that the sample should target.</param> /// <param name="eventHubName">The name of the Event Hub, sometimes known as its path, that the sample should run against.</param> /// public async Task RunAsync(string connectionString, string eventHubName) { // An Event Hub consumer is associated with a specific Event Hub and consumer group. The consumer group is // a label that identifies one or more consumers as a set. Often, consumer groups are named after the responsibility // of the consumer in an application, such as "Telemetry" or "OrderProcessing". When an Event Hub is created, a default // consumer group is created with it, called "$Default." // // Each consumer has a unique view of the events in a partition that it reads from, meaning that events are available to all // consumers and are not removed from the partition when a consumer reads them. This allows for one or more consumers to read and // process events from the partition at different speeds and beginning with different events without interfering with // one another. // // When events are published, they will continue to exist in the partition and be available for consuming until they // reach an age where they are older than the retention period. // (see: https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-faq#what-is-the-maximum-retention-period-for-events) // // Because events are not removed from the partition when consuming, a consumer must specify where in the partition it // would like to begin reading events. For example, this may be starting from the very beginning of the stream, at an // offset from the beginning, the next event available after a specific point in time, or at a specific event. // // In this example, we will create our consumer client using the default consumer group that is created with an Event Hub. // Our consumer will begin watching the partition at the very end, reading only new events that we will publish for it. await using (var consumerClient = new EventHubConsumerClient(EventHubConsumerClient.DefaultConsumerGroupName, connectionString, eventHubName)) { // We will start by using the consumer client to inspect the Event Hub and select a partition to operate against to ensure that events are being // published and read from the same partition. string firstPartition = (await consumerClient.GetPartitionIdsAsync()).First(); // Because our consumer is reading from the latest position, it won't see events that have previously // been published. Before we can publish the events and have them observed, we will need to ask the consumer // to perform an operation, because it opens its connection only when it needs to. // // When a maximum wait time is specified, the iteration will ensure that it returns control after that time has elapsed, // whether or not an event is available in the partition. If no event was available, a null value will be emitted instead. // This is intended to return control to the loop and avoid blocking for an indeterminate period of time to allow event // processors to verify that the iterator is still consuming the partition and to make decisions on whether or not to continue // if events are not arriving. // // We'll begin to iterate on the partition using a small wait time, so that control will return to our loop even when // no event is available. 
For the first call, we'll publish so that we can receive them. // // For this example, we will specify a maximum wait time, and won't exit the loop until we've received at least one more // event than we published, which is expected to be a null value triggered by exceeding the wait time. // // To be sure that we do not block forever waiting on an event that is not published, we will request cancellation after a // fairly long interval. CancellationTokenSource cancellationSource = new CancellationTokenSource(); cancellationSource.CancelAfter(TimeSpan.FromSeconds(30)); ReadOptions readOptions = new ReadOptions { MaximumWaitTime = TimeSpan.FromMilliseconds(250) }; bool wereEventsPublished = false; int eventBatchCount = 0; List <EventData> receivedEvents = new List <EventData>(); Stopwatch watch = Stopwatch.StartNew(); await foreach (PartitionEvent currentEvent in consumerClient.ReadEventsFromPartitionAsync(firstPartition, EventPosition.Latest, readOptions, cancellationSource.Token)) { if (!wereEventsPublished) { await using (var producerClient = new EventHubProducerClient(connectionString, eventHubName)) { using EventDataBatch eventBatch = await producerClient.CreateBatchAsync(new CreateBatchOptions { PartitionId = firstPartition }); eventBatch.TryAdd(new EventData(Encoding.UTF8.GetBytes("Hello, Event Hubs!"))); eventBatch.TryAdd(new EventData(Encoding.UTF8.GetBytes("Goodbye, Event Hubs!"))); await producerClient.SendAsync(eventBatch); wereEventsPublished = true; eventBatchCount = eventBatch.Count; await Task.Delay(250); Console.WriteLine("The event batch has been published."); } } else { receivedEvents.Add(currentEvent.Data); if (receivedEvents.Count > eventBatchCount) { watch.Stop(); break; } } } // Print out the events that we received. Console.WriteLine(); Console.WriteLine($"The following events were consumed in { watch.ElapsedMilliseconds } milliseconds:"); foreach (EventData eventData in receivedEvents) { // The body of our event was an encoded string; we'll recover the // message by reversing the encoding process. string message = (eventData == null) ? "<< This was a null event >>" : Encoding.UTF8.GetString(eventData.Body.ToArray()); Console.WriteLine($"\tMessage: \"{ message }\""); } } // At this point, our clients have passed their "using" scope and have safely been disposed of. We // have no further obligations. Console.WriteLine(); }
//http://blog.building-blocks.com/uploading-images-using-the-core-service-in-sdl-tridion-2011 public static void UploadImages(string location, string folderTcmId, CoreService2010Client client, log4net.ILog Log) { //create a reference to the directory of where the images are DirectoryInfo directory = new DirectoryInfo(location); //create global Tridion Read Options ReadOptions readOptions = new ReadOptions(); //use Expanded so that Tridion exposes the TcmId of the newly created component readOptions.LoadFlags = LoadFlags.Expanded; try { //loop through the files foreach (FileInfo fileInfo in directory.GetFiles()) { //only allow images if (IsAllowedFileType(fileInfo.Extension)) { try { //create a new multimedia component in the folder specified ComponentData multimediaComponent = (ComponentData)client.GetDefaultData(Tridion.ItemType.Component, folderTcmId); multimediaComponent.Title = fileInfo.Name.ToLower(); multimediaComponent.ComponentType = ComponentType.Multimedia; multimediaComponent.Schema.IdRef = ConfigurationManager.AppSettings["MultimediaSchemaId"]; //create a string to hold the temporary location of the image to use later string tempLocation = ""; //use the StreamUpload2010Client to upload the image into Tridion using (Tridion.StreamUpload2010Client streamClient = new StreamUpload2010Client()) using (FileStream objfilestream = new FileStream(fileInfo.FullName, FileMode.Open, FileAccess.Read)) { tempLocation = streamClient.UploadBinaryContent(fileInfo.Name.ToLower(), objfilestream); } //create a new binary component BinaryContentData binaryContent = new BinaryContentData(); //set this temporary upload location to the source of this binary binaryContent.UploadFromFile = tempLocation; binaryContent.Filename = fileInfo.Name.ToLower(); //get the multimedia type id binaryContent.MultimediaType = new LinkToMultimediaTypeData() { IdRef = GetMultimediaTypeId(fileInfo.Extension) }; multimediaComponent.BinaryContent = binaryContent; //save the image into a new object IdentifiableObjectData savedComponent = client.Save(multimediaComponent, readOptions); //check in using the Id of the new object client.CheckIn(savedComponent.Id, null); } catch (Exception ex) { Log.Debug("Error creating image " + fileInfo.Name, ex); } } } } catch (Exception ex) { Log.Error("Error processing images", ex); } finally { //clean up temp objects } }
static void Main(string[] args) { ulong writeBufferSize = 33554432; int maxWriteBufferNumber = 2; int minWriteBufferNumberToMerge = 1; uint memtablePrefixBloomBits = 10000000; int memtablePrefixBloomProbes = 10; ulong memtablePrefixBloomHugePageTlbSize = 2 * 1024 * 1024; string temp = Path.GetTempPath(); string path = Environment.ExpandEnvironmentVariables(Path.Combine(temp, "rocksdb_prefix_example")); var bbto = new BlockBasedTableOptions() .SetFilterPolicy(BloomFilterPolicy.Create(10, false)) .SetWholeKeyFiltering(false) ; var options = new DbOptions() .SetCreateIfMissing(true) .SetCreateMissingColumnFamilies(true) ; var columnFamilies = new ColumnFamilies { { "default", new ColumnFamilyOptions().OptimizeForPointLookup(256) }, { "test", new ColumnFamilyOptions() //.SetWriteBufferSize(writeBufferSize) //.SetMaxWriteBufferNumber(maxWriteBufferNumber) //.SetMinWriteBufferNumberToMerge(minWriteBufferNumberToMerge) .SetMemtableHugePageSize(2 * 1024 * 1024) .SetPrefixExtractor(SliceTransform.CreateFixedPrefix((ulong)8)) .SetBlockBasedTableFactory(bbto) }, }; using (var db = RocksDb.Open(options, path, columnFamilies)) { var cf = db.GetColumnFamily("test"); db.Put("00000000Zero", "", cf: cf); db.Put("00000000One", "", cf: cf); db.Put("00000000Two", "", cf: cf); db.Put("00000000Three", "", cf: cf); db.Put("00000001Red", "", cf: cf); db.Put("00000001Green", "", cf: cf); db.Put("00000001Black", "", cf: cf); db.Put("00000002Apple", "", cf: cf); db.Put("00000002Cranberry", "", cf: cf); db.Put("00000002Banana", "", cf: cf); var readOptions = new ReadOptions(); using (var iter = db.NewIterator(readOptions: readOptions, cf: cf)) { GC.Collect(); GC.WaitForPendingFinalizers(); var b = Encoding.UTF8.GetBytes("00000001"); iter.Seek(b); while (iter.Valid()) { Console.WriteLine(iter.StringKey()); iter.Next(); } } } Console.WriteLine("Done..."); }
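Note that the loop above runs until the iterator is exhausted; with a fixed-prefix extractor it is common to also guard on the key prefix, since iterating past the seek prefix is not guaranteed to stay within it. A defensive variant using only the iterator calls shown above:

```csharp
// Stop as soon as keys leave the "00000001" prefix.
iter.Seek(Encoding.UTF8.GetBytes("00000001"));
while (iter.Valid() && iter.StringKey().StartsWith("00000001", StringComparison.Ordinal))
{
    Console.WriteLine(iter.StringKey());
    iter.Next();
}
```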
internal Status InternalGet( ReadOptions options, Slice k, object arg, HandleResultDelegate saver) { Status s = new Status(); Iterator iiter = rep_.IndexBlock.NewIterator(rep_.Options.Comparator); iiter.Seek(k); if (iiter.Valid) { Slice handle_value = iiter.Value; FilterBlockReader filter = rep_.Filter; BlockHandle handle = new BlockHandle(); if (filter != null && handle.DecodeFrom(ref handle_value).IsOk && !filter.KeyMayMatch(handle.Offset, k)) { // Not found } else { Iterator blockIter = BlockReader(this, options, iiter.Value); blockIter.Seek(k); if (blockIter.Valid) { saver(arg, blockIter.Key, blockIter.Value); } s = blockIter.Status; } } if (s.IsOk) { s = iiter.Status; } return s; }
public List <OperResult> Intersect(QueryTree tree, List <OperResult> oper_list, TableInfo table_info, Dictionary <long, byte[]> cache, ReadOptions ro) { if (tree.IntersectInfo == null) { return(oper_list); } foreach (var inter in tree.IntersectInfo) { var res = IntersectOne(inter, table_info, cache, ro); var oper_res = new OperResult() { All = false, ResIds = res }; oper_res.Id = inter.Id; oper_res.OrWith = inter.OrWith; oper_list.Add(oper_res); } return(oper_list); }
/// <summary> /// Get a list of PublicationTargetData objects that represent Tridion publication targets, using the CoreService /// </summary> /// <param name="targetTypes">A list of TargetType URIs</param> /// <returns>a list of PublicationTargetData objects that represent Tridion publication targets</returns> public List<PublicationTargetData> GetPublicationTargetsFromTargetTypes(List<string> targetTypes) { List<PublicationTargetData> publicationTargets = new List<PublicationTargetData>(); PublicationTargetsFilterData targetFilter = new PublicationTargetsFilterData(); XElement publicationTargetList = this.Client.GetSystemWideListXml(targetFilter); foreach (XElement pubTargetNode in publicationTargetList.Elements(ToolsConfiguration.TridionCmNamespace + "Item")) { ReadOptions options = new ReadOptions(); options.LoadFlags = LoadFlags.Expanded; PublicationTargetData pubTarget = (PublicationTargetData)this.Client.Read(pubTargetNode.Attribute("ID").Value, options); foreach (LinkToTargetTypeData link in pubTarget.TargetTypes) { if (targetTypes.Contains(link.IdRef)) { publicationTargets.Add(pubTarget); break; //avoid adding the same target twice when several of its target types match } } } return publicationTargets; }
public List <int> IntersectOne(IntersectInfo info, TableInfo table_info, Dictionary <long, byte[]> cache, ReadOptions ro) { var result = new List <int>(); //if (info.Set.Count() > 50) //{ Oper odb = new Oper() { TableNumber = info.TableNumber, ColumnType = info.ColumnType, ColumnNumber = info.ColumnNumber, ColumnName = table_info.ColumnNumbers.First(f => f.Value == info.ColumnNumber).Key, TableName = table_info.Name }; var skey = MakeSnapshotKey(table_info.TableNumber, table_info.ColumnNumbers[odb.ColumnName]); var snapid = leveld_db.Get(skey, null, ro); if (snapid != null) { var snapshot_id = Encoding.UTF8.GetString(snapid); skey = MakeSnapshotKey(table_info.TableNumber, table_info.ColumnNumbers["Id"]); snapid = leveld_db.Get(skey, null, ro); string id_snapshot_id = Encoding.UTF8.GetString(snapid); var index_res = IntersectWithIndex(odb, info.Set, ro, snapshot_id, id_snapshot_id); if (index_res != null) { return(index_res); } } foreach (var val in info.Set) { Oper odb2 = new Oper() { TableNumber = info.TableNumber, ColumnType = info.ColumnType, ColumnNumber = info.ColumnNumber, ColumnName = table_info.ColumnNumbers.First(f => f.Value == info.ColumnNumber).Key, TableName = table_info.Name }; result.AddRange(EqualOperator(odb2, val, table_info, cache, ro, null, null)); } return(result); //} //foreach (var val in info.Set) //{ // Oper odb = new Oper() // { // TableNumber = info.TableNumber, // ColumnType = info.ColumnType, // ColumnNumber = info.ColumnNumber, // ColumnName = table_info.ColumnNumbers.First(f => f.Value == info.ColumnNumber).Key, // TableName = table_info.Name // }; // result.AddRange(EqualOperator(odb, val, table_info, cache, ro, null, null)); //} //return result; }
/// <summary> /// Initializes a core service client on the basicHttp_2010 endpoint together with default read options. /// </summary> public CoreServicesUtil() { this.coreServiceClient = new CoreService2010Client("basicHttp_2010"); this.readOptions = new ReadOptions(); }
List <int> IntersectWithIndex(Oper odb, List <EncodedValue> val, ReadOptions ro, string snapshot_id, string id_snapshot_id) { var result = new List <int>(); if (string.IsNullOrEmpty(snapshot_id)) { return(null); } if (string.IsNullOrEmpty(odb.TableName) || string.IsNullOrEmpty(odb.ColumnName)) { throw new LinqDbException("Linqdb: bad indexes."); } if (!indexes.ContainsKey(odb.TableName + "|" + odb.ColumnName + "|" + snapshot_id)) { return(null); } var index = indexes[odb.TableName + "|" + odb.ColumnName + "|" + snapshot_id]; var ids_index = indexes[odb.TableName + "|Id|" + id_snapshot_id]; if (index.IndexType == IndexType.GroupOnly) { return(null); } int bloom_max = 1000000; switch (odb.ColumnType) { case LinqDbTypes.int_: HashSet <int> ivals = new HashSet <int>(val.Where(f => !f.IsNull).Select(f => f.IntVal)); List <bool> bloom_int = new List <bool>(bloom_max); for (int i = 0; i < bloom_max; i++) { bloom_int.Add(false); } foreach (var int_val in ivals) { bloom_int[int_val % bloom_max] = true; } int icount = index.Parts.Count(); for (int i = 0; i < icount; i++) { var ids = ids_index.Parts[i].IntValues; var iv = index.Parts[i].IntValues; int jcount = iv.Count(); for (int j = 0; j < jcount; j++) { int id = ids[j]; if (bloom_int[iv[j] % bloom_max]) { if (ivals.Contains(iv[j])) { result.Add(id); } } } } break; case LinqDbTypes.double_: case LinqDbTypes.DateTime_: HashSet <double> dvals = new HashSet <double>(val.Where(f => !f.IsNull).Select(f => f.DoubleVal)); List <bool> bloom_double = new List <bool>(bloom_max); for (int i = 0; i < bloom_max; i++) { bloom_double.Add(false); } foreach (var d_val in dvals) { bloom_double[Math.Abs(d_val.GetHashCode()) % bloom_max] = true; } int dcount = index.Parts.Count(); for (int i = 0; i < dcount; i++) { var ids = ids_index.Parts[i].IntValues; var iv = index.Parts[i].DoubleValues; int jcount = iv.Count(); for (int j = 0; j < jcount; j++) { int id = ids[j]; if (bloom_double[Math.Abs(iv[j].GetHashCode()) % bloom_max]) { if (dvals.Contains(iv[j])) { result.Add(id); } } } } break; default: return(null); } return(result); }
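The `bloom_int`/`bloom_double` lists above act as a dense bitmap pre-filter: a cheap array probe rules out most misses before the more expensive `HashSet` lookup. A standalone sketch of the idea:

```csharp
// Bitmap pre-filter in isolation (assumes non-negative int values).
const int bloomMax = 1000000;
var wanted = new HashSet<int> { 7, 42, 999983 };
var bitmap = new bool[bloomMax];
foreach (int v in wanted) bitmap[v % bloomMax] = true;

bool MightContain(int v) => bitmap[v % bloomMax];               // cheap; false positives possible
bool Contains(int v) => MightContain(v) && wanted.Contains(v);  // exact answer
```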
public ExecutionOptions(QueryOptions queryOptions, ReadOptions readOptions = null) { this.QueryOptions = queryOptions ?? new QueryOptions(); this.ReadOptions = readOptions ?? new ReadOptions(); }
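Because both constructor arguments fall back to fresh defaults, callers only need to supply the options they actually want to override. A minimal sketch of the two fallback paths (variable names are illustrative):
var queryOnly = new ExecutionOptions(new QueryOptions()); // ReadOptions falls back to new ReadOptions()
var allDefaults = new ExecutionOptions(null); // QueryOptions falls back to new QueryOptions() as well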
public LevelDBBlockchain(string path) { header_index.Add(GenesisBlock.Hash); Version version; Slice value; db = DB.Open(path, new Options { CreateIfMissing = true }); if (db.TryGet(ReadOptions.Default, SliceBuilder.Begin(DataEntryPrefix.SYS_Version), out value) && Version.TryParse(value.ToString(), out version) && version >= Version.Parse("2.7.4")) { ReadOptions options = new ReadOptions { FillCache = false }; value = db.Get(options, SliceBuilder.Begin(DataEntryPrefix.SYS_CurrentBlock)); UInt256 current_header_hash = new UInt256(value.ToArray().Take(32).ToArray()); this.current_block_height = value.ToArray().ToUInt32(32); uint current_header_height = current_block_height; if (db.TryGet(options, SliceBuilder.Begin(DataEntryPrefix.SYS_CurrentHeader), out value)) { current_header_hash = new UInt256(value.ToArray().Take(32).ToArray()); current_header_height = value.ToArray().ToUInt32(32); } foreach (UInt256 hash in db.Find(options, SliceBuilder.Begin(DataEntryPrefix.IX_HeaderHashList), (k, v) => { using (MemoryStream ms = new MemoryStream(v.ToArray(), false)) using (BinaryReader r = new BinaryReader(ms)) { return(new { Index = k.ToArray().ToUInt32(1), Hashes = r.ReadSerializableArray <UInt256>() }); } }).OrderBy(p => p.Index).SelectMany(p => p.Hashes).ToArray()) { if (!hash.Equals(GenesisBlock.Hash)) { header_index.Add(hash); } stored_header_count++; } if (stored_header_count == 0) { Header[] headers = db.Find(options, SliceBuilder.Begin(DataEntryPrefix.DATA_Block), (k, v) => Header.FromTrimmedData(v.ToArray(), sizeof(long))).OrderBy(p => p.Index).ToArray(); for (int i = 1; i < headers.Length; i++) { header_index.Add(headers[i].Hash); } } else if (current_header_height >= stored_header_count) { for (UInt256 hash = current_header_hash; hash != header_index[(int)stored_header_count - 1];) { Header header = Header.FromTrimmedData(db.Get(options, SliceBuilder.Begin(DataEntryPrefix.DATA_Block).Add(hash)).ToArray(), sizeof(long)); header_index.Insert((int)stored_header_count, hash); hash = header.PrevHash; } } } else { WriteBatch batch = new WriteBatch(); ReadOptions options = new ReadOptions { FillCache = false }; using (Iterator it = db.NewIterator(options)) { for (it.SeekToFirst(); it.Valid(); it.Next()) { batch.Delete(it.Key()); } } db.Write(WriteOptions.Default, batch); Persist(GenesisBlock); db.Put(WriteOptions.Default, SliceBuilder.Begin(DataEntryPrefix.SYS_Version), GetType().GetTypeInfo().Assembly.GetName().Version.ToString()); } thread_persistence = new Thread(PersistBlocks); thread_persistence.Name = "LevelDBBlockchain.PersistBlocks"; thread_persistence.Start(); }
public void Read(Stream stream, ReadOptions options) { this.ReadCore(stream, options); }
public static PublishTransactionData PublishItem <T>(this T item, string[] targets, PublishInstructionData publishInstruction = null, PublishPriority priority = PublishPriority.Normal, ReadOptions readOptions = null) where T : RepositoryLocalObjectData { if (item == null) { throw new ArgumentNullException(nameof(item)); } if (targets == null || !targets.Any()) { throw new ArgumentNullException(nameof(targets)); } return(PublishItems(new string[] { item.Id }, targets, publishInstruction, priority, readOptions).FirstOrDefault()); }
public static Status ReadBlock(RandomAccessFile file, ReadOptions options, BlockHandle handle, out BlockContents result) { result.Data = new Slice(); result.Cachable = false; result.HeapAllocated = false; // Read the block contents as well as the type/crc footer. // See table_builder.cc for the code that built this structure. int n = (int)handle.Size; ByteArrayPointer buf = new ByteArrayPointer(n + kBlockTrailerSize); Slice contents; Status s = file.Read(handle.Offset, n + kBlockTrailerSize, out contents, buf); if (!s.IsOk) { return s; } if (contents.Size != n + kBlockTrailerSize) { return Status.Corruption("truncated block read"); } // Check the crc of the type and the block contents ByteArrayPointer data = contents.Data; // Pointer to where Read put the data if (options.VerifyChecksums) { // const uint32_t crc = crc32c::Unmask(DecodeFixed32(data + n + 1)); //const uint32_t actual = crc32c::Value(data, n + 1); //if (actual != crc) { // delete[] buf; // s = Status::Corruption("block checksum mismatch"); // return s; //} throw new NotSupportedException("CRC is not supported yet"); } switch ((CompressionType)data[n]) { case CompressionType.kNoCompression: if (data != buf) { // File implementation gave us pointer to some other data. // Use it directly under the assumption that it will be live // while the file is open. result.Data = new Slice(data, n); result.HeapAllocated = false; result.Cachable = false; // Do not double-cache } else { result.Data = new Slice(buf, n); result.HeapAllocated = true; result.Cachable = true; } // Ok break; case CompressionType.kSnappyCompression: throw new NotSupportedException("snappy not supported"); // { // int ulength = 0; // if (!port::Snappy_GetUncompressedLength(data, n, &ulength)) { // delete[] buf; // return Status::Corruption("corrupted compressed block contents"); // } // char* ubuf = new char[ulength]; // if (!port::Snappy_Uncompress(data, n, ubuf)) { // delete[] buf; // delete[] ubuf; // return Status::Corruption("corrupted compressed block contents"); // } // delete[] buf; // result->data = Slice(ubuf, ulength); // result->heap_allocated = true; // result->cachable = true; // break; //} default: return Status.Corruption("bad block type"); } return Status.OK; }
public static PublishTransactionData[] PublishItems <T>(this T items, string[] targets, PublishInstructionData publishInstruction = null, PublishPriority priority = PublishPriority.Normal, ReadOptions readOptions = null) where T : IEnumerable <RepositoryLocalObjectData> { if (items == null || !items.Any()) { throw new ArgumentNullException(nameof(items)); } if (targets == null || !targets.Any()) { throw new ArgumentNullException(nameof(targets)); } return(PublishItems(items.Select(x => x.Id).ToArray(), targets, publishInstruction, priority, readOptions)); }
public void TestSnapshot() { var path = CleanTestDB(); using (var db = new DB(path, new Options { CreateIfMissing = true })) { db.Put("Tampa", "green"); db.Put("London", "red"); db.Delete("New York"); using (var snapShot = db.CreateSnapshot()) { var readOptions = new ReadOptions { Snapshot = snapShot }; db.Put("New York", "blue"); Assert.AreEqual(db.Get("Tampa", readOptions), "green"); Assert.AreEqual(db.Get("London", readOptions), "red"); // Snapshot was taken before the key was updated Assert.IsNull(db.Get("New York", readOptions)); } // can see the change now Assert.AreEqual(db.Get("New York"), "blue"); } }
public static PublishTransactionData[] PublishItems(this string[] items, string[] targets, PublishInstructionData publishInstruction = null, PublishPriority priority = PublishPriority.Normal, ReadOptions readOptions = null) { if (items == null || !items.Any()) { throw new ArgumentNullException(nameof(items)); } if (targets == null || !targets.Any()) { throw new ArgumentNullException(nameof(targets)); } return(TridionCoreServiceFactory.Publish(items, targets, publishInstruction, priority, readOptions)); }
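Taken together, these overloads let a caller publish a single item, a typed collection, or raw TCM URIs through the same pipeline. A hedged sketch of the URI-based overload; the URIs below are placeholders, and the publish instruction, priority, and read options are omitted so the extension's defaults apply:
string[] itemIds = { "tcm:5-123-64" }; // hypothetical page URI
string[] targetIds = { "tcm:0-1-65537" }; // hypothetical publication target URI
PublishTransactionData[] transactions = itemIds.PublishItems(targetIds);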
public override void Process(ServiceProcess process, object arguments) { PagePublisherParameters parameters = (PagePublisherParameters)arguments; process.SetCompletePercentage(0); process.SetStatus("Initializing"); using (var coreService = Client.GetCoreService()) { _pagePublisherData = new PagePublisherData(); string[] pageIds; if (parameters.LocationId.EndsWith("-1") || parameters.LocationId.EndsWith("-4")) // Publication or Structure Group { // get a list of the items from the core service ItemsFilterData filter = GetFilter(parameters); XElement listXml = coreService.GetListXml(parameters.LocationId, filter); // Get the page IDs that will be published pageIds = GetPageIds(listXml); } else // Component { var readOptions = new ReadOptions(); // Get the current component var componentData = (ComponentData)coreService.Read(parameters.LocationId, readOptions); // Get the initial set of using items var filter = new UsingItemsFilterData { BaseColumns = ListBaseColumns.Default, IncludedVersions = VersionCondition.OnlyLatestAndCheckedOutVersions, IncludeLocalCopies = true, ItemTypes = new[] { ItemType.Component, ItemType.Page } }; var usingItemsXml = coreService.GetListXml(parameters.LocationId, filter); var pageIdsList = GetPageIds(usingItemsXml).ToList(); var level = 1; // We set the depth limit to 10, just so that we will never get an infinite loop in case // component 1 is included within a component 2 that is included within component 1. int depthLimit = 10; var componentIdsList = GetComponentIds(usingItemsXml).ToList(); var targets = componentIdsList.Distinct(StringComparer.InvariantCultureIgnoreCase); while (level <= depthLimit && targets.Count() > 0) { var nextTargets = new HashSet<string>(); foreach (var targetId in targets) { usingItemsXml = coreService.GetListXml(targetId, filter); pageIdsList.AddRange(GetPageIds(usingItemsXml)); foreach (var e in usingItemsXml.Elements()) { nextTargets.Add(e.Attribute("ID").Value); } } targets = nextTargets.ToList(); level++; } pageIds = pageIdsList.ToArray(); } int batchSize = 5; int currentBatch = 0; // Publish pages try { double ratio = pageIds.Count() / (double)batchSize; // cast avoids integer division double percentage = 100 / ratio; double currperc = 0; while (currentBatch * batchSize < pageIds.Count()) { string[] nextBatch = pageIds.Skip(currentBatch * batchSize) .Take(batchSize).ToArray(); coreService.Publish(nextBatch, GetPublishInstructionData(parameters), parameters.TargetUri, parameters.Priority, new ReadOptions()); currentBatch++; currperc += percentage; if (currperc >= 1) { process.IncrementCompletePercentage(); currperc = 0; } } _pagePublisherData.SuccessMessage = string.Format("{0} pages published successfully", pageIds.Length); } catch (Exception ex) { _pagePublisherData.FailedMessage = string.Format("Page publishing failed, reason: {0}", ex.Message); } process.Complete("done"); } }
List <int> EqualOperatorWithIndex(Oper odb, EncodedValue val, TableInfo table_info, ReadOptions ro, string snapshot_id, string id_snapshot_id) { List <int> result = new List <int>(); if (odb.ColumnName == "Id") { return(null); } if (string.IsNullOrEmpty(table_info.Name) || string.IsNullOrEmpty(odb.ColumnName)) { throw new LinqDbException("Linqdb: bad indexes."); } if (!indexes.ContainsKey(table_info.Name + "|" + odb.ColumnName + "|" + snapshot_id)) { return(null); } var index = indexes[table_info.Name + "|" + odb.ColumnName + "|" + snapshot_id]; var ids_index = indexes[table_info.Name + "|Id|" + id_snapshot_id]; if (index.IndexType == IndexType.GroupOnly) { return(null); } switch (table_info.Columns[odb.ColumnName]) { case LinqDbTypes.int_: //if (val.IsNull) //{ // int icount = index.Parts.Count(); // for (int i = 0; i < icount; i++) // { // var ids = ids_index.Parts[i].IntValues; // var iv = index.Parts[i].IntValues; // int jcount = iv.Count(); // for (int j = 0; j < jcount; j++) // { // if (iv[j] == null) // { // result.Add((int)ids[j]); // } // } // } //} //else //{ int icount = index.Parts.Count(); int ival = val.IntVal; for (int i = 0; i < icount; i++) { var ids = ids_index.Parts[i].IntValues; var iv = index.Parts[i].IntValues; int jcount = iv.Count(); for (int j = 0; j < jcount; j++) { if (iv[j] == ival) { result.Add(ids[j]); } } } //} break; case LinqDbTypes.double_: case LinqDbTypes.DateTime_: if (val.IsNull) { int icountd = index.Parts.Count(); for (int i = 0; i < icountd; i++) { var ids = ids_index.Parts[i].IntValues; var iv = index.Parts[i].DoubleValues; int jcount = iv.Count(); for (int j = 0; j < jcount; j++) { if (iv[j] == null) { result.Add((int)ids[j]); } } } } else { int icountd = index.Parts.Count(); for (int i = 0; i < icountd; i++) { var ids = ids_index.Parts[i].IntValues; var iv = index.Parts[i].DoubleValues; int jcount = iv.Count(); for (int j = 0; j < jcount; j++) { if (iv[j] == val.DoubleVal) { result.Add((int)ids[j]); } } } } break; default: return(null); } return(result); }
internal static Iterator NewTwoLevelIterator( Iterator index_iter, BlockFunctionDelegate block_function, object arg, ReadOptions options) { return new TwoLevelIterator(index_iter, block_function, arg, options); }
List <int> EqualOperator(Oper odb, EncodedValue val, TableInfo table_info, Dictionary <long, byte[]> cache, ReadOptions ro, string snapshot_id, string id_snapshot_id) { if (!string.IsNullOrEmpty(snapshot_id)) { var index_res = EqualOperatorWithIndex(odb, val, table_info, ro, snapshot_id, id_snapshot_id); if (index_res != null) { return(index_res); } } byte[] byte_val = null; if (val.IsNull) { byte_val = NullConstant; } else if (odb.ColumnType == LinqDbTypes.double_ || odb.ColumnType == LinqDbTypes.DateTime_) { if (val.DoubleVal < 0) { odb.ColumnNumber *= -1; val.DoubleVal = -1 * val.DoubleVal; } byte_val = BitConverter.GetBytes(val.DoubleVal).MyReverseNoCopy(); } else if ((odb.ColumnType == LinqDbTypes.int_)) { if (val.IntVal < 0) { odb.ColumnNumber *= -1; val.IntVal = -1 * val.IntVal; } byte_val = BitConverter.GetBytes(val.IntVal).MyReverseNoCopy(); } else if (odb.ColumnType == LinqDbTypes.string_) { byte_val = val.StringValue; } var result_set = new List <int>(); var key = MakeIndexSearchKey(new IndexKeyInfo() { ColumnNumber = odb.ColumnNumber, TableNumber = odb.TableNumber, Val = byte_val, ColumnType = odb.ColumnType }); using (var it = leveld_db.NewIterator(null, ro)) { it.Seek(key); if (!it.Valid()) { return(result_set); } var v = it.Key(); if (v == null) { return(result_set); } var kinfo = GetIndexKey(v); if (kinfo.NotKey || kinfo.TableNumber != odb.TableNumber || kinfo.ColumnNumber != odb.ColumnNumber) { return(result_set); } if (ValsEqual(kinfo.Val, byte_val)) { result_set.Add(kinfo.Id); //PutToCache(kinfo, cache); while (true) { it.Next(); if (!it.Valid()) { return(result_set); } var ckey = it.Key(); if (ckey == null) { return(result_set); } kinfo = GetIndexKey(ckey); if (kinfo.NotKey || kinfo.TableNumber != odb.TableNumber || kinfo.ColumnNumber != odb.ColumnNumber) { return(result_set); } if (ValsEqual(kinfo.Val, byte_val)) { result_set.Add(kinfo.Id); //PutToCache(kinfo, cache); } else { return(result_set); } } } return(result_set); } }
/// <summary> /// Returns a new iterator over the table contents. /// The result of NewIterator() is initially invalid (caller must /// call one of the Seek methods on the iterator before using it). /// </summary> public Iterator NewIterator(ReadOptions options) { return TwoLevelIterator.NewTwoLevelIterator( rep_.IndexBlock.NewIterator(rep_.Options.Comparator), Table.BlockReader, this, options); }
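A hedged sketch of consuming the returned iterator, assuming it exposes the disposable SeekToFirst/Valid/Next/Key surface the neighbouring LevelDB samples use; FillCache is turned off here for a one-off full scan, and 'table' is a hypothetical Table instance:
var scanOptions = new ReadOptions { FillCache = false, VerifyChecksums = false };
using (Iterator it = table.NewIterator(scanOptions))
{
    for (it.SeekToFirst(); it.Valid(); it.Next())
    {
        var key = it.Key(); // visit each entry in key order
    }
}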
// // [TODO] // static void Main(string[] args) { CommandArgumentSet arguments = new CommandArgumentSet(); arguments.Register("-output", ""); if (arguments.Parse(args) == false) { return; } if (arguments.Filenames.Count != 1) { print_usage(); return; } string sInputFile = arguments.Filenames[0]; string sFilenameRoot = Path.GetFileNameWithoutExtension(sInputFile); if (!File.Exists(sInputFile)) { System.Console.WriteLine("cannot find file " + sInputFile); return; } DMesh3Builder builder = new DMesh3Builder(); StandardMeshReader reader = new StandardMeshReader() { MeshBuilder = builder }; ReadOptions read_options = ReadOptions.Defaults; read_options.ReadMaterials = true; IOReadResult readOK = reader.Read(sInputFile, read_options); if (readOK.code != IOCode.Ok) { System.Console.WriteLine("Error reading " + sInputFile); System.Console.WriteLine(readOK.message); return; } if (builder.Meshes.Count == 0) { System.Console.WriteLine("did not find any valid meshes in " + sInputFile); return; } // [TODO] out if count == 0 string sOutRoot = arguments.Strings["-output"]; if (sOutRoot.Length > 0) { bool bOutIsFolder = Directory.Exists(sOutRoot); if (!bOutIsFolder) { System.Console.WriteLine("-output folder {0} does not exist", sOutRoot); return; } } Dictionary <int, List <int> > MeshesByMaterial = new Dictionary <int, List <int> >(); MeshesByMaterial[-1] = new List <int>(); for (int i = 0; i < builder.Materials.Count; ++i) { MeshesByMaterial[i] = new List <int>(); } int N = builder.Meshes.Count; for (int i = 0; i < N; ++i) { int mati = builder.MaterialAssignment[i]; if (mati >= builder.Materials.Count) { mati = -1; } MeshesByMaterial[mati].Add(i); } int file_i = 0; foreach (int mat_i in MeshesByMaterial.Keys) { List <int> mesh_idxs = MeshesByMaterial[mat_i]; if (mesh_idxs.Count == 0) { continue; } WriteMesh[] write_meshes = new WriteMesh[mesh_idxs.Count]; for (int i = 0; i < mesh_idxs.Count; ++i) { write_meshes[i] = new WriteMesh(builder.Meshes[mesh_idxs[i]]); } string suffix = string.Format("_material{0}", file_i++); string sOutPath = Path.Combine(sOutRoot, sFilenameRoot + suffix + ".obj"); StandardMeshWriter writer = new StandardMeshWriter(); WriteOptions write_options = WriteOptions.Defaults; if (mat_i != -1) { write_options.bWriteMaterials = true; write_options.bPerVertexUVs = true; write_options.MaterialFilePath = Path.Combine(sOutRoot, sFilenameRoot + suffix + ".mtl"); GenericMaterial mat = builder.Materials[mat_i]; List <GenericMaterial> matList = new List <GenericMaterial>() { mat }; ConstantIndexMap idxmap = new ConstantIndexMap(0); for (int i = 0; i < write_meshes.Length; ++i) { write_meshes[i].Materials = matList; write_meshes[i].TriToMaterialMap = idxmap; } } IOWriteResult writeOK = writer.Write(sOutPath, new List <WriteMesh>(write_meshes), write_options); if (writeOK.code != IOCode.Ok) { System.Console.WriteLine("Error writing " + sOutPath); System.Console.WriteLine(writeOK.message); } } // ok done! //System.Console.ReadKey(); }
private static Iterator BlockReader(object arg, ReadOptions options, Slice indexValue) { Table table = (Table)arg; Cache blockCache = table.rep_.Options.BlockCache; Block block = null; Cache.Handle cacheHandle = null; BlockHandle handle = new BlockHandle(); Slice input = indexValue; Status s = handle.DecodeFrom(ref input); // We intentionally allow extra stuff in index_value so that we // can add more features in the future. if (s.IsOk) { BlockContents contents; if (blockCache != null) { ByteArrayPointer cacheKeyBuffer = new ByteArrayPointer(16); Coding.EncodeFixed64(cacheKeyBuffer, table.rep_.CacheId); Coding.EncodeFixed64(cacheKeyBuffer + 8, handle.Offset); Slice key = new Slice(cacheKeyBuffer, cacheKeyBuffer.Length); cacheHandle = blockCache.Lookup(key); if (cacheHandle != null) { block = (Block)(blockCache.Value(cacheHandle)); } else { s = FormatHelper.ReadBlock(table.rep_.File, options, handle, out contents); if (s.IsOk) { block = new Block(contents); if (contents.Cachable && options.FillCache) { cacheHandle = blockCache.Insert(key, block, block.Size); } } } } else { s = FormatHelper.ReadBlock(table.rep_.File, options, handle, out contents); if (s.IsOk) { block = new Block(contents); } } } Iterator iter; if (block != null) { iter = block.NewIterator(table.rep_.Options.Comparator); if (cacheHandle != null) { iter.RegisterCleanup(ReleaseBlock, blockCache, cacheHandle); } } else { iter = Iterator.NewErrorIterator(s); } return iter; }
/// <summary> /// Converts a DataTable to a list with generic objects /// </summary> /// <typeparam name="T">Generic object</typeparam> /// <param name="table">DataTable</param> /// <param name="options">Read options that control title mapping and value converters.</param> /// <returns>List with generic objects</returns> private static IEnumerable <T> AsEnumerableTyped <T>(this DataTable table, ReadOptions options) where T : class, new() { foreach (DataRow row in table.Rows) { T obj = new T(); foreach (var prop in obj.GetType().GetProperties()) { try { PropertyInfo p = obj.GetType().GetProperty(prop.Name); if (p == null) { continue; } string GetFieldNameFromCustomAttribute() { var atts = p.CustomAttributes.FirstOrDefault(c => c.AttributeType == typeof(ColumnTittleAttribute) || c.AttributeType == typeof(ColumnNameAttribute) || c.AttributeType == typeof(DisplayNameAttribute) ); if (options == null || options.TitlesInFirstRow) { //TODO: Refactor when ColumnTittleAttribute support is removed if (atts == null) { return(p.Name); } if (!string.IsNullOrEmpty(p.GetCustomAttribute <DisplayNameAttribute>()?.DisplayName)) { return(p.GetCustomAttribute <DisplayNameAttribute>().DisplayName); } return(!string.IsNullOrEmpty(p.GetCustomAttribute <ColumnTittleAttribute>()?.Title) ? p.GetCustomAttribute <ColumnTittleAttribute>().Title : p.Name); } else { return(atts != null && !string.IsNullOrEmpty(p.GetCustomAttribute <ColumnNameAttribute>()?.ColumnName) ? p.GetCustomAttribute <ColumnNameAttribute>().ColumnName : p.Name); } } var fieldName = GetFieldNameFromCustomAttribute(); // treat empty (DBNull) cells that contain no strings as empty strings var objValue = row[fieldName]; if (objValue == System.DBNull.Value) { objValue = string.Empty; } //Converters to parse data to typed field if (options?.Converters != null && options.Converters.Any() && options.Converters.ContainsKey(p.Name)) { var exp = options.Converters[p.Name]; var f = exp.Compile(); p.SetValue(obj, Convert.ChangeType(f.DynamicInvoke(objValue), p.PropertyType), null); } else { p.SetValue(obj, Convert.ChangeType(objValue, p.PropertyType), null); } } catch (Exception ex) { if (System.Diagnostics.Debugger.IsAttached) { PropertyInfo p = obj.GetType().GetProperty(prop.Name); Console.WriteLine($"{ex.Message} {p.Name}"); } continue; } } yield return(obj); } }
static void Main(string[] args) { //args[0] = "tcm:11-403-8"; if (!args.Any()) { Log("Please pass the Schema Tcm Uri as a parameter."); return; } string schemaUri = args[0]; if (!TcmUri.IsValid(schemaUri)) { Log("The specified URI of " + schemaUri + " is not a valid URI, please pass the schema Tcm Uri as a parameter."); return; } SessionAwareCoreServiceClient client = new SessionAwareCoreServiceClient("netTcp_2013"); if (!client.IsExistingObject(schemaUri)) { Log("Could not find item with URI " + schemaUri + " in Tridion. Please pass the Schema Tcm Uri as a parameter."); return; } ReadOptions readOptions = new ReadOptions(); UsingItemsFilterData whereUsedFilter = new UsingItemsFilterData { ItemTypes = new[] { ItemType.Component } }; SchemaData schema = (SchemaData)client.Read(schemaUri, readOptions); SchemaFieldsData schemaFieldsData = client.ReadSchemaFields(schema.Id, true, readOptions); bool hasMeta = schemaFieldsData.MetadataFields.Any(); string newNamespace = schema.NamespaceUri; if (schema.Purpose == SchemaPurpose.Metadata) { List<IdentifiableObjectData> items = new List<IdentifiableObjectData>(); UsingItemsFilterData anyItem = new UsingItemsFilterData(); foreach (XElement node in client.GetListXml(schema.Id, anyItem).Nodes()) { string uri = node.Attribute("ID").Value; items.Add(client.Read(uri, readOptions)); } Log("Found " + items.Count + " items using schema..."); foreach (var item in items) { if (item is PublicationData) { PublicationData pub = (PublicationData)item; string meta = pub.Metadata; XmlDocument xml = new XmlDocument(); xml.LoadXml(meta); string oldnamespace = xml.DocumentElement.NamespaceURI; if (oldnamespace != newNamespace) { Log("Replacing namespace for publication " + pub.Id + " (" + pub.Title + ") - Current Namespace: " + oldnamespace); string metadata = meta.Replace(oldnamespace, newNamespace); pub.Metadata = metadata; client.Update(pub, readOptions); } } else if (item is RepositoryLocalObjectData) { RepositoryLocalObjectData data = (RepositoryLocalObjectData)item; string meta = data.Metadata; XmlDocument xml = new XmlDocument(); xml.LoadXml(meta); string oldnamespace = xml.DocumentElement.NamespaceURI; if (oldnamespace != newNamespace) { Log("Replacing namespace for item " + data.Id + " (" + data.Title + ") - Current Namespace: " + oldnamespace); string metadata = meta.Replace(oldnamespace, newNamespace); data.Metadata = metadata; client.Update(data, readOptions); } } } return; } List<ComponentData> components = new List<ComponentData>(); foreach (XElement node in client.GetListXml(schema.Id, whereUsedFilter).Nodes()) { string uri = node.Attribute("ID").Value; components.Add((ComponentData)client.Read(uri, readOptions)); } Log("Found " + components.Count + " components."); Log("Current schema namespace set to " + newNamespace + ", checking for components with incorrect namespace."); int count = 0; foreach (var component in components) { if (schema.Purpose == SchemaPurpose.Multimedia) { Log("Changing Multimedia Component"); string meta = component.Metadata; XmlDocument metaXml = new XmlDocument(); metaXml.LoadXml(meta); string metaOldnamespace = metaXml.DocumentElement.NamespaceURI; if (metaOldnamespace != newNamespace) { Log("Replacing namespace for item " + component.Id + " (" + component.Title + ") - Current Namespace: " + metaOldnamespace); string metadata = meta.Replace(metaOldnamespace, newNamespace); component.Metadata = metadata; client.Update(component, readOptions); } count++; Log(components.Count - count + " components remaining..."); continue; } 
string content = component.Content; XmlDocument xml = new XmlDocument(); xml.LoadXml(content); string oldnamespace = xml.DocumentElement.NamespaceURI; if (oldnamespace != newNamespace) { Log("Replacing namespace for component " + component.Id + " (" + component.Title + ") - Current Namespace: " + oldnamespace); content = content.Replace(oldnamespace, newNamespace); try { ComponentData editableComponent = component; editableComponent.Content = content; if (hasMeta) { string metadata = editableComponent.Metadata.Replace(oldnamespace, newNamespace); // Fix for new meta if (string.IsNullOrEmpty(metadata)) { metadata = string.Format("<Metadata xmlns=\"{0}\" />", newNamespace); Log("Component had no metadata, but the schema specifies it should. Adding an empty metadata node"); } editableComponent.Metadata = metadata; } if (!hasMeta && !(string.IsNullOrEmpty(editableComponent.Metadata))) { editableComponent.Metadata = string.Empty; } client.Update(editableComponent, readOptions); } catch (Exception ex) { Log("Error occurred trying to update component: " + component.Id + Environment.NewLine + ex); } } count++; Log(components.Count - count + " components remaining..."); } }
/// <summary> /// Read Table from Excel Sheet into a typed IEnumerable Collection /// </summary> /// <typeparam name="T"></typeparam> /// <param name="wb">ClosedXml WorkBook instance</param> /// <param name="sheetNumber">Workbook sheet number to read</param> /// <param name="options">Read options <see cref="ReadOptions"/></param> /// <returns></returns> public static IEnumerable <T> ReadTable <T>(this IXLWorkbook wb, int sheetNumber, ReadOptions options = null) where T : class, new() { if (sheetNumber <= 0 || sheetNumber > wb.Worksheets.Count) { throw new IndexOutOfRangeException($"{nameof(sheetNumber)} is Out of Range"); } var ws = wb.Worksheet(sheetNumber); var dt = ReadExcelSheet(ws, options); return(dt.AsEnumerableTyped <T>(options)); }
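A hedged usage sketch: map sheet 1 of a workbook onto a POCO, leaning on the TitlesInFirstRow flag the typed mapper checks. The Person type and file name are illustrative, and TitlesInFirstRow is assumed to be a settable property:
public class Person { public string Name { get; set; } public int Age { get; set; } }
using (var wb = new XLWorkbook("people.xlsx")) // hypothetical input file
{
    var options = new ReadOptions { TitlesInFirstRow = true };
    IEnumerable<Person> people = wb.ReadTable<Person>(1, options);
}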
public IdentifiableObjectData Read(string tcmUri, ReadOptions options) { return this.Client.Read(tcmUri, options); }
StringReader _innerReadToMatchAsReader(char leftChar, char rightChar, char[] leftQuotes, char[] rightQuotes, ReadOptions mode) { var idx = UnderlyingString.IndexOfNextMatch(leftChar, rightChar, leftQuotes, rightQuotes, CurrentPosition); if (idx == -1) { return(null); } else { if (UnderlyingString[CurrentPosition] == leftChar && mode.HasFlag(ReadOptions.DiscardKey)) { ++CurrentPosition; } return(_innerReadToAsReader(idx, 1, mode)); } }
public PeekResult Peek(PeekParameters parameters) { try { if (parameters == null) { throw new ArgumentNullException("parameters"); } if (string.IsNullOrWhiteSpace(parameters.ItemUri)) { throw new ArgumentException(Resources.MissingItemUri); } if (Client.IsExistingObject(parameters.ItemUri)) { var readOptions = new ReadOptions { LoadFlags = LoadFlags.Expanded | LoadFlags.WebDavUrls }; var item = Client.Read(parameters.ItemUri, readOptions); switch (GetItemType(item.Id)) { case ItemType.Category: return CategoryResult.From((CategoryData)item, Client, CurrentUserId); case ItemType.Component: return ComponentResult.From((ComponentData)item, CurrentUserId); case ItemType.ComponentTemplate: return ComponentTemplateResult.From((ComponentTemplateData)item, Client, CurrentUserId); case ItemType.Folder: return FolderResult.From((FolderData)item, CurrentUserId); case ItemType.Group: return GroupResult.From((GroupData)item); case ItemType.Keyword: return KeywordResult.From((KeywordData)item, CurrentUserId); case ItemType.MultimediaType: return MultimediaTypeResult.From((MultimediaTypeData)item); case ItemType.Page: return PageResult.From((PageData)item, Client, CurrentUserId); case ItemType.PageTemplate: return PageTemplateResult.From((PageTemplateData)item, Client, CurrentUserId); case ItemType.Publication: return PublicationResult.From((PublicationData)item, CurrentUserId); case ItemType.PublicationTarget: return PublicationTargetResult.From((PublicationTargetData)item); case ItemType.Schema: return SchemaResult.From((SchemaData)item, CurrentUserId); case ItemType.StructureGroup: return StructureGroupResult.From((StructureGroupData)item, CurrentUserId); case ItemType.TargetGroup: return TargetGroupResult.From((TargetGroupData)item, CurrentUserId); case ItemType.TargetType: return TargetTypeResult.From((TargetTypeData)item); case ItemType.TemplateBuildingBlock: return TemplateBuildingBlockResult.From((TemplateBuildingBlockData)item, Client, CurrentUserId); case ItemType.User: return UserResult.From((UserData)item, Client); case ItemType.VirtualFolder: return VirtualFolderResult.From((VirtualFolderData)item, CurrentUserId); } } return new EmptyResult(); } catch (Exception ex) { throw new HttpResponseException(Request.CreateErrorResponse(HttpStatusCode.InternalServerError, ex.Message)); } }
/// <summary> /// Reads data from the input buffer until a break key(s) is found. /// </summary> /// <returns></returns> public ReadRef Read(ReadOptions options) { return(InputLoop.ReadInput(options)); }
/// <summary> /// Reads one or several records of a file in a synchronous way. /// </summary> /// <param name="filename">File name to read.</param> /// <param name="recordIds">A list of item IDs to read, separated by the Record Separator character (30). Use StringFunctions.ComposeRecordIds to compose this string</param> /// <param name="dictionaries">List of dictionaries to read, separated by space. If this list is not set, all fields are returned. You may use the format LKFLDx where x is the attribute number.</param> /// <param name="readOptions">Object that defines the different reading options of the Function: Calculated, dictClause, conversion, formatSpec, originalRecords.</param> /// <param name="inputFormat">Indicates in what format you wish to send the record ids: MV, XML or JSON.</param> /// <param name="outputFormat">Indicates in what format you want to receive the data resulting from the Read, New, Update and Select operations: MV, XML, XML_DICT, XML_SCH, JSON, JSON_DICT or JSON_SCH.</param> /// <param name="customVars">Free text sent to the database allows management of additional behaviours in SUB.LK.MAIN.CONTROL.CUSTOM, which is called when this parameter is set.</param> /// <param name="receiveTimeout">Maximum time in seconds that the client will wait for a response from the server. Default = 0 to wait indefinitely.</param> /// <returns>The results of the operation.</returns> public string Read(string filename, string recordIds, string dictionaries = "", ReadOptions readOptions = null, DATAFORMAT_TYPE inputFormat = DATAFORMAT_TYPE.MV, DATAFORMATCRU_TYPE outputFormat = DATAFORMATCRU_TYPE.MV, string customVars = "", int receiveTimeout = 0) { if (this._ConnectionInfo != null) { string readArgs = OperationArguments.GetReadArgs(filename, recordIds, dictionaries, readOptions, customVars); byte opCode = (byte)OPERATION_CODE.READ; byte byteInputFormat = (byte)inputFormat; byte byteOutputFormat = (byte)outputFormat; string result = Linkar.ExecutePersistentOperation(this._ConnectionInfo, opCode, readArgs, byteInputFormat, byteOutputFormat, receiveTimeout); return(result); } else { return(""); } }
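A hedged call-site sketch for the synchronous Read, using the StringFunctions.ComposeRecordIds helper that the recordIds documentation points to; the client variable, file name, and the helper's params-style signature are assumptions:
var options = new ReadOptions();
string recordIds = StringFunctions.ComposeRecordIds("2", "3"); // IDs joined with the Record Separator char (30)
string result = client.Read("LK.CUSTOMERS", recordIds, "", options); // 'client' is a connected instance of this class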
public void Snapshot() { // modify db Database.Set("key1", "value1"); // create snapshot using(var snapshot = Database.CreateSnapshot()) { // modify db again Database.Set("key2", "value2"); // read from snapshot var readOptions = new ReadOptions() { Snapshot = snapshot }; string val1 = Database.Get("key1", readOptions); Assert.Equal("value1", val1); string val2 = Database.Get("key2", readOptions); Assert.Null(val2); // read from non-snapshot readOptions.Snapshot = null; val1 = Database.Get("key1", readOptions); Assert.Equal("value1", val1); val2 = Database.Get("key2", readOptions); Assert.Equal("value2", val2); } // release snapshot // GC calls ~Snapshot() for us }
public IEnumerable<Item> GetItems(Guid id, ReadOptions readOptions) { IQueryable<Item> items = _context.Items .Include(i => i.Feed) .Where(i => i.Feed.FolderId == id); if (readOptions == ReadOptions.Unread) { items = items.Where(i => i.ReadDateTime == null); // honour the requested read state } return items.OrderByDescending(i => i.PublishedDateTime); }
public void Get() { Database.Set("key1", "value1"); var value1 = Database.Get("key1"); Assert.Equal("value1", (string)value1); Database.Set("key2", "value2"); var value2 = Database.Get("key2"); Assert.Equal("value2", (string)value2); Database.Set("key3", "value3"); var value3 = Database.Get("key3"); Assert.Equal("value3", (string)value3); // verify checksum var options = new ReadOptions() { VerifyCheckSums = true }; value1 = Database.Get("key1", options); Assert.Equal("value1", (string)value1); // no fill cache options = new ReadOptions() { FillCache = false }; value2 = Database.Get("key2", options); Assert.Equal("value2", (string)value2); }
public void Create_LargeSegmentedArchive() { // There was a claim that large archives (around or above // 1gb) did not work well with archive splitting. This test // covers that case. #if REMOTE_FILESYSTEM string parentDir = Path.Combine("t:\\tdir", Path.GetFileNameWithoutExtension(TopLevelDir)); _FilesToRemove.Add(parentDir); Directory.CreateDirectory(parentDir); string zipFileToCreate = Path.Combine(parentDir, "Create_LargeSegmentedArchive.zip"); #else string zipFileToCreate = Path.Combine(TopLevelDir, "Create_LargeSegmentedArchive.zip"); #endif TestContext.WriteLine("Creating file {0}", zipFileToCreate); // This file will "cache" the randomly generated text, so we // don't have to generate more than once. You know, for // speed. string cacheFile = Path.Combine(TopLevelDir, "cacheFile.txt"); // int maxSegSize = 4*1024*1024; // int sizeBase = 20 * 1024 * 1024; // int sizeRandom = 1 * 1024 * 1024; // int numFiles = 3; // int maxSegSize = 80*1024*1024; // int sizeBase = 320 * 1024 * 1024; // int sizeRandom = 20 * 1024 * 1024 ; // int numFiles = 5; int maxSegSize = 120*1024*1024; int sizeBase = 420 * 1024 * 1024; int sizeRandom = 20 * 1024 * 1024; int numFiles = _rnd.Next(5) + 11; TestContext.WriteLine("The zip will contain {0} files", numFiles); int numSaving= 0, totalToSave = 0, numSegs= 0; long sz = 0; // There are a bunch of Action<T>'s here. This test method originally // used ZipFile.AddEntry overload that accepts an opener/closer pair. // It conjured content for the files out of a RandomTextGenerator // stream. This worked, but was very very slow. So I took a new // approach to use a WriteDelegate, and still contrive the data, but // cache it for entries after the first one. This makes things go much // faster. // // But, when using the WriteDelegate, the SaveProgress events of // flavor ZipProgressEventType.Saving_EntryBytesRead do not get // called. Therefore the progress updates are done from within the // WriteDelegate itself. The SaveProgress events for SavingStarted, // BeforeWriteEntry, and AfterWriteEntry do get called. As a result // this method uses 2 delegates: one for writing and one for the // SaveProgress events. WriteDelegate writer = (name, stream) => { Stream input = null; Stream cache = null; try { // use a cache file as the content. The entry // name will vary but we'll get the content for // each entry from a single cache file. if (File.Exists(cacheFile)) { input = File.Open(cacheFile, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite); // Make the file slightly shorter with each // successive entry, just to shake things // up a little. Also seek forward a little. var fl = input.Length; input.SetLength(fl - _rnd.Next(sizeRandom/2) + 5201); input.Seek(_rnd.Next(sizeRandom/2), SeekOrigin.Begin); } else { sz = sizeBase + _rnd.Next(sizeRandom); input = new Ionic.Zip.Tests.Utilities.RandomTextInputStream((int)sz); cache = File.Create(cacheFile); } _txrx.Send(String.Format("pb 2 max {0}", sz)); _txrx.Send("pb 2 value 0"); var buffer = new byte[8192]; int n; Int64 totalWritten = 0; int nCycles = 0; using (input) { while ((n= input.Read(buffer,0, buffer.Length))>0) { stream.Write(buffer,0,n); if (cache!=null) cache.Write(buffer,0,n); totalWritten += n; // for performance, don't update the // progress monitor every time. 
nCycles++; if (nCycles % 312 == 0) { _txrx.Send(String.Format("pb 2 value {0}", totalWritten)); _txrx.Send(String.Format("status Saving entry {0}/{1} {2} :: {3}/{4}mb {5:N0}%", numSaving, totalToSave, name, totalWritten/(1024*1024), sz/(1024*1024), ((double)totalWritten) / (0.01 * sz))); } } } } finally { if (cache!=null) cache.Dispose(); } }; EventHandler<SaveProgressEventArgs> sp = (sender1, e1) => { switch (e1.EventType) { case ZipProgressEventType.Saving_Started: numSaving= 0; break; case ZipProgressEventType.Saving_BeforeWriteEntry: _txrx.Send("test Large Segmented Zip"); _txrx.Send(String.Format("status saving {0}", e1.CurrentEntry.FileName)); totalToSave = e1.EntriesTotal; numSaving++; break; // case ZipProgressEventType.Saving_EntryBytesRead: // if (!_pb2Set) // { // _txrx.Send(String.Format("pb 2 max {0}", e1.TotalBytesToTransfer)); // _pb2Set = true; // } // _txrx.Send(String.Format("status Saving entry {0}/{1} {2} :: {3}/{4}mb {5:N0}%", // numSaving, totalToSave, // e1.CurrentEntry.FileName, // e1.BytesTransferred/(1024*1024), e1.TotalBytesToTransfer/(1024*1024), // ((double)e1.BytesTransferred) / (0.01 * e1.TotalBytesToTransfer))); // string msg = String.Format("pb 2 value {0}", e1.BytesTransferred); // _txrx.Send(msg); // break; case ZipProgressEventType.Saving_AfterWriteEntry: TestContext.WriteLine("Saved entry {0}, {1} bytes", e1.CurrentEntry.FileName, e1.CurrentEntry.UncompressedSize); _txrx.Send("pb 1 step"); _pb2Set = false; break; } }; _txrx = TestUtilities.StartProgressMonitor("largesegmentedzip", "Large Segmented ZIP", "Creating files"); _txrx.Send("bars 3"); _txrx.Send("pb 0 max 2"); _txrx.Send(String.Format("pb 1 max {0}", numFiles)); // build a large zip file out of thin air var sw = new StringWriter(); using (ZipFile zip = new ZipFile()) { zip.StatusMessageTextWriter = sw; zip.BufferSize = 256 * 1024; zip.CodecBufferSize = 128 * 1024; zip.MaxOutputSegmentSize = maxSegSize; zip.SaveProgress += sp; for (int i = 0; i < numFiles; i++) { string filename = TestUtilities.GetOneRandomUppercaseAsciiChar() + Path.GetFileNameWithoutExtension(Path.GetRandomFileName()) + ".txt"; zip.AddEntry(filename, writer); } zip.Save(zipFileToCreate); numSegs = zip.NumberOfSegmentsForMostRecentSave; } #if REMOTE_FILESYSTEM if (((long)numSegs*maxSegSize) < (long)(1024*1024*1024L)) { _FilesToRemove.Remove(parentDir); Assert.IsTrue(false, "There were not enough segments in that zip. numsegs({0}) maxsize({1}).", numSegs, maxSegSize); } #endif _txrx.Send("status Verifying the zip ..."); _txrx.Send("pb 0 step"); _txrx.Send("pb 1 value 0"); _txrx.Send("pb 2 value 0"); ReadOptions options = new ReadOptions { StatusMessageWriter = new StringWriter() }; string extractDir = "verify"; int c = 0; while (Directory.Exists(extractDir + c)) c++; extractDir += c; using (ZipFile zip2 = ZipFile.Read(zipFileToCreate, options)) { _numFilesToExtract = zip2.Entries.Count; _numExtracted= 1; _pb1Set= false; zip2.ExtractProgress += ExtractProgress; zip2.ExtractAll(extractDir); } string status = options.StatusMessageWriter.ToString(); TestContext.WriteLine("status:"); foreach (string line in status.Split('\n')) TestContext.WriteLine(line); }
/// <summary> /// The constructor reads the csv file to this object. /// </summary> /// <param name="fileName">Full path name of the csv file.</param> /// <param name="options">Options for reading csv files.</param> public CsvFile(string fileName, ReadOptions options) : this(fileName, options, null) { }
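A hedged construction sketch, assuming ReadOptions exposes the Delimiter and Separator characters the parser splits on as settable properties; the file path is a placeholder:
var options = new ReadOptions { Delimiter = ';', Separator = ',' }; // assumed settable char properties
var csv = new CsvFile(@"C:\data\result.csv", options); // no log supplied: parser warnings are discarded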
public async Task <ReadReply> ReadAsync(ReadOptions readOptions) => await Read(readOptions).CastTask <ReadReply>();
/// <summary> /// Reads one or several records of a file in an asynchronous way with XML input and output format. /// </summary> /// <param name="filename">File name to read.</param> /// <param name="records">A list of item IDs to read.</param> /// <param name="dictionaries">List of dictionaries to read, separated by space. If this list is not set, all fields are returned. You may use the format LKFLDx where x is the attribute number.</param> /// <param name="readOptions">Object that defines the different reading options of the Function: Calculated, dictClause, conversion, formatSpec, originalRecords.</param> /// <param name="xmlFormat">Different XML output formats.</param> /// <param name="customVars">Free text sent to the database allows management of additional behaviours in SUB.LK.MAIN.CONTROL.CUSTOM, which is called when this parameter is set.</param> /// <param name="receiveTimeout">Maximum time in seconds that the client will wait for a response from the server. Default = 0 to wait indefinitely.</param> /// <returns>The results of the operation.</returns> public Task <string> ReadAsync(string filename, string records, string dictionaries = "", ReadOptions readOptions = null, XML_FORMAT xmlFormat = XML_FORMAT.XML, string customVars = "", int receiveTimeout = 0) { var task = new Task <string>(() => { return(this.Read(filename, records, dictionaries, readOptions, xmlFormat, customVars, receiveTimeout)); }); task.Start(); return(task); }