/// <summary>
/// Runs the XSD tool against the supplied XML file and returns the schema
/// (*.xsd) files it produced in the working directory.
/// </summary>
/// <param name="xmlFile">The XML file to generate a schema for.</param>
/// <returns>The generated schema files whose names match the source file's base name.</returns>
/// <exception cref="FileLoadException">Thrown when the XSD invocation fails.</exception>
public static IReadOnlyCollection<FileInfo> GenerateSchema(FileInfo xmlFile)
{
    try
    {
        LH.Write($"\rGenerating Schema (FileInfo): {xmlFile.Name}\t\t\t\t\t");

        IReadOnlyCollection<string> xsdArguments = GetXSDArguments(xmlFile);
        if (!CallXSD(xsdArguments))
            throw new FileLoadException($"The call to XSD failed on the file:\r\n{xmlFile.FullName}");

        // Strip the import-type extension to get the base name shared by the generated schemas.
        ImportFileType fileType = ESRIHelper.GetImportFileType(xmlFile);
        string extension = ESRIHelper.GetImportFileExtension(fileType);
        string baseName = xmlFile.Name.Replace(extension, "").Trim('.').Trim();

        string searchPattern = $"{baseName}*{ESRIHelper.XmlSchemaExtension}";
        return WorkingDirectory.GetFiles(searchPattern, SearchOption.AllDirectories);
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Processes an XML import file end-to-end: generates its XML schema, then
/// generates C# classes from that schema.
/// </summary>
/// <param name="xmlFile">The XML file to process.</param>
/// <returns>True when both schema and class generation produced output; otherwise false.</returns>
public static bool ProcessFile(FileInfo xmlFile)
{
    try
    {
        LH.Write($"\rProcessing {xmlFile.Name} as an XML file\t\t\t\t\t");

        IReadOnlyCollection<FileInfo> schemas = GenerateSchema(xmlFile);
        //throw new FileLoadException($"FAILED TO LOAD XML SCHEMA: {xmlFile.Name}");
        // TODO: GET THE XSLT(?) TRANSFORMATIONS WORKING SO WE CAN GET THE DBF.*.XML FILES PROCESSED AS WELL
        if (!schemas.Any())
            return false;

        IReadOnlyCollection<FileInfo> classes = GenerateClass(schemas);
        bool generatedAnyClasses = classes != null && classes.Any();
        if (!generatedAnyClasses)
            return false;

        // TODO: Figure out and implement what to do with the class files at this point!
        //Parallel.ForEach(schemas, schema => schema.Delete());
        return true;
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Convenience overload: wraps the path in a <see cref="FileInfo"/> and delegates
/// to <see cref="GenerateSchema(FileInfo)"/>.
/// </summary>
/// <param name="xmlFilePath">Path to the XML file to generate a schema for.</param>
/// <returns>The generated schema files.</returns>
public static IReadOnlyCollection<FileInfo> GenerateSchema(string xmlFilePath)
{
    try
    {
        LH.Write($"\rGenerating Schema (string): {xmlFilePath}\t\t\t\t\t");
        FileInfo xmlFile = new FileInfo(xmlFilePath);
        return GenerateSchema(xmlFile);
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Convenience overload: converts each schema path to a <see cref="FileInfo"/> and
/// delegates to <see cref="GenerateClass(IReadOnlyCollection{FileInfo})"/>.
/// </summary>
/// <param name="schemaFilePaths">Paths of the schema files to generate classes from.</param>
/// <returns>The generated class files.</returns>
public static IReadOnlyCollection<FileInfo> GenerateClass(IReadOnlyCollection<string> schemaFilePaths)
{
    try
    {
        LH.Write($"\rGenerating Class (string)\t\t\t\t\t");
        List<FileInfo> schemaFiles = new List<FileInfo>(schemaFilePaths.Count);
        foreach (string path in schemaFilePaths)
        {
            schemaFiles.Add(new FileInfo(path));
        }
        return GenerateClass(schemaFiles.ToArray());
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Reads the file's entire contents into <c>EncodingValue</c> and logs the
/// encoding type that was read.
/// </summary>
/// <param name="file">The code-page file to import.</param>
public void ImportFromFile(FileInfo file)
{
    try
    {
        using (StreamReader reader = new StreamReader(file.OpenRead()))
        {
            EncodingValue = reader.ReadToEnd();
            LH.Write($"\r{file.Name.Split('.')[0]} has an encoding type of {EncodingValue}\t\t\t\t\t");
        }
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Reads the entire projection file into <c>WellKnownText</c> and logs the resulting
/// SRID (presumably derived from the well-known text elsewhere in this class — confirm).
/// </summary>
/// <param name="file">The projection (.prj) file to import.</param>
public void ImportFromFile(FileInfo file)
{
    try
    {
        // The using block disposes (and closes) the reader; the previous explicit
        // sr.Close() call inside the using was redundant and has been removed.
        using (StreamReader sr = new StreamReader(file.FullName))
        {
            WellKnownText = sr.ReadToEnd();
        }
        LH.Write($"\r{file.Name.Split('.')[0]} has an SRID of {SRID}\t\t\t\t\t");
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Runs the XSD tool against the supplied schema files and returns the generated
/// C# class (*.cs) files found in the output directory.
/// </summary>
/// <param name="schemaFiles">The XML schema (*.xsd) files to generate classes from.</param>
/// <returns>The generated class files whose names match the first schema's base name.</returns>
/// <exception cref="FileNotFoundException">Thrown when the collection is empty or its first file does not exist.</exception>
/// <exception cref="FileLoadException">Thrown when the XSD invocation fails.</exception>
public static IReadOnlyCollection<FileInfo> GenerateClass(IReadOnlyCollection<FileInfo> schemaFiles)
{
    try
    {
        // Validate the input up front, before doing any argument-building work
        // (previously the arguments were built even when the collection was empty).
        FileInfo firstSchemaFile = schemaFiles.FirstOrDefault();
        if (firstSchemaFile == null || !firstSchemaFile.Exists)
            throw new FileNotFoundException(
                "The first schema file in the collection does not exist or was not found");

        // Class generation must not include the dataset argument, so strip it out.
        IReadOnlyCollection<string> args = GetXSDArguments(schemaFiles, ImportFileType.XmlSchema, true);
        List<string> cleanArgs = args.ToList();
        cleanArgs.Remove(DatasetArgument);
        args = cleanArgs.AsReadOnly();

        // Derive the base name used for both logging and locating the output files.
        ImportFileType importType = ESRIHelper.GetImportFileType(firstSchemaFile);
        string fileExtension = ESRIHelper.GetImportFileExtension(importType);
        string fileName = firstSchemaFile.Name.Replace(fileExtension, "")
                                              .Trim('.')
                                              .Replace(".", "_")
                                              .Trim();

        LH.Write($"\rGenerating Classes (FileInfo): {fileName}\t\t\t\t\t");

        bool result = CallXSD(args);
        if (!result)
            throw new FileLoadException(
                $"The call to XSD failed on the files:\r\n{fileName}");

        IReadOnlyCollection<FileInfo> results =
            OutputDirectory.GetFiles($"{fileName}*.cs", SearchOption.AllDirectories);
        int resultCount = results.Count;
        LH.Write(
            $"\r{resultCount} Class{(resultCount == 1 ? "" : "es")} Generated for {fileName}\t\t\t\t\t");
        return results;
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
//public void ProcessDirectory(string directoryPath /*, Dictionary<ImportFileType, string> fileTypes = null*/)
//{
//    try
//    {
//        DirectoryInfo directory = new DirectoryInfo(directoryPath);
//        if (!directory.Exists)
//            throw new Exception("The specified directory does not exist!");
//        SetSimpleLogging();
//        LH.Write($"\r{DateTime.Now} | Processing all files in {directory.Name}\t\t\t\t\t");
//        //IReadOnlyCollection<IGrouping<string, FileInfo>> fileGroups = directory.GetFiles("*", SearchOption.AllDirectories)
//        //    .OrderBy(o => o.Name.Split('_')[3])
//        //    .ThenByDescending(o => o.Name.Split('_')[2].ToLowerInvariant().Equals("us"))
//        //    .ThenByDescending(t => t.Name.Split('_')[2].Length == 2)
//        //    .ThenByDescending(t => t.Name.Split('_')[2].Length > 2)
//        //    .ThenBy(t => t.Name.Split('_')[3])
//        //    .ThenBy(t => t.Length)
//        //    .GroupBy(g => g.Name.Split('.')[0])
//        //    .ToArray();
//        IReadOnlyCollection<IGrouping<string, FileInfo>> fileGroups = directory.GetFiles("*", SearchOption.AllDirectories)
//            .OrderByDescending(o => o.Name.EndsWith(GetImportFileExtension(ImportFileType.CodePage)))
//            .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.Projection)))
//            .ThenByDescending(o => o.Name.EndsWith(GetImportFileExtension(ImportFileType.XmlFile)))
//            .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.XmlSchema)))
//            .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.Attribute)))
//            .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.Index)))
//            .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.Shape)))
//            // Tiger naming convention ordering
//            .ThenByDescending(o => o.Name.Split('_')[2].ToLowerInvariant().Equals("us"))
//            .ThenByDescending(t => t.Name.Split('_')[2].Length.Equals(2))
//            .ThenByDescending(t => t.Name.Split('_')[2].Length > 2)
//            .ThenBy(t => t.Name.Split('_')[3])
//            .ThenBy(t => t.Name)
//            .ThenBy(t => t.Length)
//            //.GroupBy(g => g.Name.Split('_')[3]) // state, county, city, place, etc (Tiger naming convention) IE: a_b_state*, a_b_city*
//            .GroupBy(g => g.Name.Replace(GetImportFileExtension(GetImportFileType(g)), "").Trim('.').Trim()) // Import File grouping by name IE: a.xml, a.dbf, a.shp
//            .ToArray();
//        if (!fileGroups.Any()
//            || !(fileGroups.Sum(s => s.Count()) > 0))
//            throw new FileNotFoundException(
//                "No files/groups were found in the specified directory. Please check the directory and try again.");
//        foreach (IGrouping<string, FileInfo> fileGroup in fileGroups.Where(w => !w.Any() || w.Any(a => !a.Exists)))
//        {
//            LH.Write($"\rSkipping {fileGroup.Key} as it contained no valid files. (How did this happen?)\t\t\t\t\t");
//        }
//        //Parallel.ForEach(fileGroups.Where(w => w.Any() && w.All(a => a.Exists)), ProcessFileGroup);
//        //Parallel.ForEach(fileGroups.Where(w => w.Any()), ProcessFileGroupAsync);
//        foreach (IGrouping<string, FileInfo> fileGroup in fileGroups.Where(w => w.Any()))
//        {
//            // Async group processing causes a read from toooooo many files and results take FOREVER to show up
//            // We like immediate results so lets leave the file groups synchronous and do the individual files asyncronously
//            ProcessFileGroup(fileGroup);
//            //ProcessFileGroupAsync(fileGroup);
//            LH.Write($"\rParsing of {fileGroup.Key} is complete\t\t\t\t\t");
//        }
//    }
//    catch (Exception e)
//    {
//        LH.Error($"\r\n{e.Message}\r\n{e}");
//        throw;
//    }
//}
/// <summary>
/// Processes every file in the supplied group (files sharing the same base name,
/// e.g. a.xml / a.dbf / a.shp), one file at a time, logging start and completion.
/// </summary>
/// <param name="fileGroup">The group of related import files to process, keyed by base file name.</param>
public void ProcessFileGroup(IGrouping <string, FileInfo> fileGroup)
{
    try
    {
        LH.Write($"\rNow handling {fileGroup.Key} files\t\t\t\t\t");
        //Parallel.ForEach(fileGroup, ProcessFile);
        //Parallel.ForEach(fileGroup, ProcessFileAsync);
        foreach (FileInfo file in fileGroup)
        {
            //ProcessFile(file);
            // NOTE(review): ProcessFileAsync is invoked without awaiting its result, so the
            // "finished parsing" log below may print before the per-file work completes —
            // confirm this fire-and-forget behavior is intentional.
            ProcessFileAsync(file);
        }
        LH.Write($"\r{DateTime.Now} | {fileGroup.Key} has finished parsing\t\t\t\t\t");
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Dispatches a single import file to the appropriate handler based on its import file type.
/// </summary>
/// <param name="file">The file to process; must exist.</param>
/// <exception cref="FileNotFoundException">Thrown when <paramref name="file"/> is null or does not exist.</exception>
/// <exception cref="NotImplementedException">Thrown for unsupported or deliberately unhandled file types.</exception>
public void ProcessFile(FileInfo file)
{
    try
    {
        if (file == null || !file.Exists)
        {
            throw new FileNotFoundException("Please specify a valid file and try the ProcessFile method again.");
        }
        LH.Write($"\rProcessing {file.Name}\t\t\t\t\t");
        ImportFileType importFileType = GetImportFileType(file);
        string fileExtension = GetImportFileExtension(importFileType);
        //LH.Write($"\r{file.Name} is a(n) {importFileType} file with an extension of {fileExtension}\t\t\t\t\t");
        switch (importFileType)
        {
            case ImportFileType.CodePage:
                //CodePageFile codePageFile = new CodePageFile(file);
                break;
            case ImportFileType.XmlFile:
                // Construction performs the import; the instance itself is not needed here
                // (presumably the constructor calls ImportFromFile — confirm in MetadataFile).
                new MetadataFile(file);
                break;
            case ImportFileType.Projection:
                // Construction performs the import (see note above for MetadataFile).
                new ProjectionFile(file);
                break;
            case ImportFileType.Attribute:
                //AttributeFile attributeFile = new AttributeFile(file);
                break;
            case ImportFileType.Index:
                //IndexFile indexFile = new IndexFile(file);
                break;
            case ImportFileType.Shape:
                //ShapeFile shapeFile = new ShapeFile(file);
                //LH.Write($"\rShapeFile Inserted: {shapeFile.Name}\t\t\t\t\t");
                break;
            case ImportFileType.GeocodingIndex:
            case ImportFileType.ODBGeocodingIndex:
                throw new NotImplementedException(
                    "We currently do not handle the processing of Geocoding Indexes or ODB Geocoding indexes. (We make our own in SQL Server)");
            case ImportFileType.XmlSchema:
                // BUGFIX: corrected the garbled/ungrammatical message text
                // ("it's own schema XML schema documentation").
                LH.WriteLine(
                    $"\rNo data is contained within {file.Name}. This application generates and utilizes its own XML schema documentation. No actions performed with this file.");
                break;
            default:
                throw new NotImplementedException(
                    $"{file.Extension} is not a supported file type. Extensions must match the ESRI Shapefile specifications.");
        }
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Imports an ESRI shapefile index (.shx): reads the 100-byte header into this entity,
/// persists it to get an Id, then bulk-copies every index record into the ShapeIndex table.
/// </summary>
/// <param name="file">The index file to import.</param>
/// <exception cref="Exception">Thrown when the header record fails to save (no Id for the child records).</exception>
public void ImportFromFile(FileInfo file)
{
    try
    {
        using (BinaryReader br = new BinaryReader(file.OpenRead()))
        {
            long streamLength = br.BaseStream.Length;
            FileCode = NumericsHelper.ReverseInt(br.ReadInt32());
            for (int i = 0; i < 5; i++)
            {
                br.ReadInt32(); // Skip 5 empty Integer (4-byte) slots
            }
            ContentLength = NumericsHelper.ReverseInt(br.ReadInt32()); // Big Endian, Reverse for actual value
            FileVersion = br.ReadInt32();
            ShapeType = (ShapeType)br.ReadInt32();
            XMin = br.ReadDouble();
            YMin = br.ReadDouble();
            XMax = br.ReadDouble();
            YMax = br.ReadDouble();
            ZMin = br.ReadDouble();
            ZMax = br.ReadDouble();
            MMin = br.ReadDouble();
            MMax = br.ReadDouble();

            // Persist the header so the child index records have a parent Id.
            int rowsAffected;
            using (ShapefileEntities db = new ShapefileEntities())
            {
                db.Entry(this).State = EntityState.Added;
                rowsAffected = db.SaveChanges();
            }
            if (!(rowsAffected > 0) || !(Id > 0))
            {
                throw new Exception(
                    "The index file was not added to the database properly. No ID is present to assign to the child index records. Unable to proceed!");
            }

            List<ShapeIndex> shapeIndices = new List<ShapeIndex>();
            int counter = 0;
            // BUGFIX: compare stream positions instead of PeekChar(). PeekChar() routes the
            // bytes through a text decoder and can throw ArgumentException on binary data.
            while (br.BaseStream.Position < streamLength)
            {
                LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));
                shapeIndices.Add(new ShapeIndex
                {
                    IndexFileId = Id,
                    RecordNumber = ++counter,
                    Offset = NumericsHelper.ReverseInt(br.ReadInt32()),
                    ContentLength = NumericsHelper.ReverseInt(br.ReadInt32())
                });
            }
            LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));

            using (SqlBulkCopy sbc = new SqlBulkCopy(DataHelper.DefaultConnectionString))
            {
                sbc.BatchSize = DataHelper.DefaultBatchSize;
                sbc.BulkCopyTimeout = DataHelper.DefaultTimeoutSeconds;
                sbc.DestinationTableName = "ShapeIndex";
                sbc.EnableStreaming = true;
                sbc.SqlRowsCopied += DataHelper.SqlBulkCopy_SqlRowsCopied;
                sbc.NotifyAfter = DataHelper.DefaultBatchSize;
                sbc.ColumnMappings.Add("Id", "Id");
                sbc.ColumnMappings.Add("IndexFileId", "IndexFileId");
                sbc.ColumnMappings.Add("RecordNumber", "RecordNumber");
                sbc.ColumnMappings.Add("Offset", "Offset");
                sbc.ColumnMappings.Add("ContentLength", "ContentLength");
                try
                {
                    DataTable shapeIndicesData = DataHelper.CreateDataTable(shapeIndices);
                    // BUGFIX: the previous WriteToServerAsync call was never awaited, so the
                    // finally block could Close() the bulk copy before the write completed
                    // and any failure would be unobserved. Use the synchronous overload.
                    sbc.WriteToServer(shapeIndicesData);
                }
                catch (Exception e)
                {
                    LH.Error($"\r\n{e.Message}\r\n{e}");
                    throw;
                }
                finally
                {
                    sbc.Close();
                }
            }
        }
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Imports an ESRI shapefile (.shp): reads the 100-byte header into this entity,
/// persists it to get an Id, then bulk-copies every shape record into the Shape table.
/// </summary>
/// <param name="file">The shapefile to import.</param>
/// <exception cref="FileLoadException">Thrown when the header record fails to save or has no valid Id.</exception>
public void ImportFromFile(FileInfo file)
{
    try
    {
        // TODO: Delete all records that pertain to this file
        using (BinaryReader br = new BinaryReader(file.OpenRead()))
        {
            long streamLength = br.BaseStream.Length;
            LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));
            FileCode = NumericsHelper.ReverseInt(br.ReadInt32());
            for (int i = 0; i < 5; i++)
            {
                br.ReadInt32(); // Skip 5 empty Integer (4-byte) slots
            }
            ContentLength = NumericsHelper.ReverseInt(br.ReadInt32()); // Big Endian, Reverse for actual value
            FileVersion = br.ReadInt32();
            ShapeType = (ShapeType)br.ReadInt32();
            XMin = br.ReadDouble();
            YMin = br.ReadDouble();
            XMax = br.ReadDouble();
            YMax = br.ReadDouble();
            ZMin = br.ReadDouble();
            ZMax = br.ReadDouble();
            MMin = br.ReadDouble();
            MMax = br.ReadDouble();

            // Persist the header so the child shape records have a parent Id.
            int rowsAffected;
            using (ShapefileEntities db = new ShapefileEntities())
            {
                db.Entry(this).State = EntityState.Added;
                rowsAffected = db.SaveChanges();
            }
            // Guard clause (flattened from the original nested if/else); same exception.
            if (rowsAffected <= 0 || Id <= 0)
            {
                throw new FileLoadException("The ShapeFile record failed to save properly or doesn't have a valid ID");
            }

            List<Shape> shapes = new List<Shape>();
            // BUGFIX: compare stream positions instead of PeekChar(). PeekChar() routes the
            // bytes through a text decoder and can throw ArgumentException on binary data.
            while (br.BaseStream.Position < streamLength)
            {
                LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));
                shapes.Add(new Shape(Id, ShapeType, br));
            }
            LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));

            using (SqlBulkCopy sbc = new SqlBulkCopy(DataHelper.DefaultConnectionString))
            {
                sbc.BatchSize = DataHelper.DefaultBatchSize;
                sbc.BulkCopyTimeout = DataHelper.DefaultTimeoutSeconds;
                sbc.DestinationTableName = "Shape";
                sbc.EnableStreaming = true;
                sbc.SqlRowsCopied += DataHelper.SqlBulkCopy_SqlRowsCopied;
                sbc.NotifyAfter = 250;
                sbc.ColumnMappings.Add("ShapeFileId", "ShapeFileId");
                sbc.ColumnMappings.Add("ShapeType", "ShapeType");
                sbc.ColumnMappings.Add("RecordNumber", "RecordNumber");
                sbc.ColumnMappings.Add("ContentLength", "ContentLength");
                sbc.ColumnMappings.Add("XMin", "XMin");
                sbc.ColumnMappings.Add("YMin", "YMin");
                sbc.ColumnMappings.Add("XMax", "XMax");
                sbc.ColumnMappings.Add("YMax", "YMax");
                sbc.ColumnMappings.Add("ZMin", "ZMin");
                sbc.ColumnMappings.Add("ZMax", "ZMax");
                sbc.ColumnMappings.Add("MMin", "MMin");
                sbc.ColumnMappings.Add("MMax", "MMax");
                sbc.ColumnMappings.Add("NumberOfParts", "NumberOfParts");
                sbc.ColumnMappings.Add("NumberOfPoints", "NumberOfPoints");
                sbc.ColumnMappings.Add("DTGeometry", "Geometry");
                try
                {
                    DataTable shapesData = DataHelper.CreateDataTable(shapes);
                    // BUGFIX: the previous WriteToServerAsync call was never awaited, so the
                    // finally block could Close() the bulk copy before the write completed
                    // and any failure would be unobserved. Use the synchronous overload.
                    sbc.WriteToServer(shapesData);
                }
                catch (Exception e)
                {
                    LH.Error($"\r\n{e.Message}\r\n{e}");
                    throw;
                }
                finally
                {
                    sbc.Close();
                }
            }
        }
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}