/// <summary>
/// Forces EF database initialization, then switches the current database's
/// recovery model to SIMPLE so bulk imports do not bloat the transaction log.
/// </summary>
public void SetSimpleLogging()
{
    using (ShapefileEntities db = new ShapefileEntities())
    {
        db.Database.Initialize(true);
    }

    using (SqlConnection conn = new SqlConnection(DataHelper.DefaultConnectionString))
    using (SqlCommand cmd = conn.CreateCommand())
    {
        cmd.CommandTimeout = DataHelper.DefaultTimeoutSeconds;
        cmd.CommandType = CommandType.Text;
        // ALTER DATABASE cannot take the database name as a parameter, so
        // bracket-quote it (escaping any ']') to survive unusual database names.
        cmd.CommandText = $"ALTER DATABASE [{conn.Database.Replace("]", "]]")}] SET RECOVERY SIMPLE";
        try
        {
            conn.Open();
            cmd.ExecuteNonQuery();
        }
        catch (Exception e)
        {
            LH.Error($"\r\n{e.Message}\r\n{e}");
            throw;
        }
        // No explicit Close(): the using block disposes (and closes) the connection.
        // The original closed the connection in both the try and finally blocks.
    }
}
/// <summary>
/// Runs XSD.exe against the given metadata XML file and returns the schema
/// (.xsd) files it produced inside the working directory.
/// </summary>
/// <param name="xmlFile">The metadata XML file to generate a schema for.</param>
/// <returns>All generated schema files matching the source file's base name.</returns>
/// <exception cref="FileLoadException">Thrown when the XSD call fails.</exception>
public static IReadOnlyCollection<FileInfo> GenerateSchema(FileInfo xmlFile)
{
    try
    {
        LH.Write($"\rGenerating Schema (FileInfo): {xmlFile.Name}\t\t\t\t\t");
        IReadOnlyCollection<string> args = GetXSDArguments(xmlFile);
        bool result = CallXSD(args);
        if (!result)
            throw new FileLoadException($"The call to XSD failed on the file:\r\n{xmlFile.FullName}");

        ImportFileType importType = ESRIHelper.GetImportFileType(xmlFile);
        string fileExtension = ESRIHelper.GetImportFileExtension(importType);

        // Strip the import extension from the END of the name only. The previous
        // blanket Replace() also removed matching text embedded earlier in the name.
        // Case-insensitive to match how GetImportFileType detects the extension.
        string fileName = xmlFile.Name;
        if (fileName.EndsWith(fileExtension, StringComparison.OrdinalIgnoreCase))
            fileName = fileName.Substring(0, fileName.Length - fileExtension.Length);
        fileName = fileName.Trim('.').Trim();

        IReadOnlyCollection<FileInfo> results =
            WorkingDirectory.GetFiles($"{fileName}*{ESRIHelper.XmlSchemaExtension}", SearchOption.AllDirectories);
        return results;
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Runs the full XML pipeline for one metadata file: generates XSD schemas,
/// then generates classes from those schemas.
/// </summary>
/// <returns>True when both schemas and classes were produced; otherwise false.</returns>
public static bool ProcessFile(FileInfo xmlFile)
{
    try
    {
        LH.Write($"\rProcessing {xmlFile.Name} as an XML file\t\t\t\t\t");

        IReadOnlyCollection<FileInfo> schemas = GenerateSchema(xmlFile);
        //throw new FileLoadException($"FAILED TO LOAD XML SCHEMA: {xmlFile.Name}");
        // TODO: GET THE XSLT(?) TRANSFORMATIONS WORKING SO WE CAN GET THE DBF.*.XML FILES PROCESSED AS WELL
        if (!schemas.Any())
        {
            return false;
        }

        IReadOnlyCollection<FileInfo> classes = GenerateClass(schemas);
        bool haveClasses = classes != null && classes.Any();
        if (!haveClasses)
        {
            return false;
        }

        // TODO: Figure out and implement what to do with the class files at this point!
        //Parallel.ForEach(schemas, schema => schema.Delete());
        return true;
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Processes all import files found in the given directory, loading each
/// metadata XML file into <c>MetadataFiles</c>.
/// </summary>
/// <param name="directory">The directory containing the import files.</param>
/// <param name="fileTypes">
/// Optional extension map; defaults to the class-level <c>FileTypes</c> map.
/// </param>
/// <exception cref="DirectoryNotFoundException">Thrown for a null or missing directory.</exception>
/// <exception cref="FileNotFoundException">Thrown when no metadata files exist.</exception>
public void ProcessDirectory(DirectoryInfo directory, Dictionary<ImportFileType, string> fileTypes = null)
{
    try
    {
        // DirectoryNotFoundException is more specific than the bare Exception
        // previously thrown here; callers catching Exception are unaffected.
        if (directory == null || !directory.Exists)
        {
            throw new DirectoryNotFoundException(
                "The specified directory does not exist! Unable to process a directory from nothing or a non-existent path.");
        }

        LH.WriteLine($"\r{DateTime.Now} | Processing files in {directory.Name}\t\t\t\t\t");
        fileTypes = fileTypes ?? FileTypes;

        IReadOnlyCollection<FileInfo> files;
        // Kept separate from first definition so we can comment out certain sections to work on only what we want
        //files = directory.GetFiles($"*{fileTypes[ImportFileType.CodePage]}");
        //if (!files.Any())
        //    LH.WriteLine($"\rNo Code Page Files were found. Defaulting to {DefaultCodePage.Encoding.EncodingName}\t\t\t\t\t");
        //else
        //    foreach (FileInfo file in files)
        //        CodePageFiles.Add(new CodePageFile(file));

        //files = directory.GetFiles($"*{fileTypes[ImportFileType.Projection]}");
        //if (!files.Any())
        //    LH.WriteLine($"\rNo Projection Files were found. Defaulting to SRID {DefaultSRID}\t\t\t\t\t");
        //else
        //    foreach (FileInfo file in files)
        //        ProjectionFiles.Add(new ProjectionFile(file));

        files = directory.GetFiles($"*{fileTypes[ImportFileType.XmlFile]}").OrderBy(o => o.Name).ToArray();
        if (!files.Any())
        {
            // The string had an interpolation prefix but no holes; plain literal now.
            throw new FileNotFoundException(
                "\rNo Metadata Files were found! Unable to generate appropriate schemas!");
        }

        foreach (FileInfo file in files)
        {
            MetadataFiles.Add(new MetadataFile(file));
        }
        //Parallel.ForEach(files, f => MetadataFiles.Add(new MetadataFile(f)));
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Convenience overload: wraps the path in a <see cref="FileInfo"/> and
/// delegates to <see cref="ProcessFile(FileInfo)"/>.
/// </summary>
public static bool ProcessFile(string xmlFilePath)
{
    try
    {
        FileInfo xmlFile = new FileInfo(xmlFilePath);
        return ProcessFile(xmlFile);
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Asynchronous wrapper: offloads <see cref="ProcessFile(FileInfo)"/> to the
/// thread pool and awaits the result.
/// </summary>
public static async Task<bool> ProcessFileAsync(FileInfo xmlFile)
{
    try
    {
        return await Task.Run(() => ProcessFile(xmlFile));
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Asynchronous wrapper: offloads <c>GenerateClass</c> to the thread pool and
/// awaits the generated class files.
/// </summary>
public static async Task<IReadOnlyCollection<FileInfo>> GenerateClassAsync(IReadOnlyCollection<FileInfo> schemaFiles)
{
    try
    {
        return await Task.Run(() => GenerateClass(schemaFiles));
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Asynchronous wrapper: offloads <c>ProcessFileGroup</c> to the thread pool.
/// </summary>
/// <remarks>
/// Changed from <c>async void</c> to <c>async Task</c>: exceptions thrown by an
/// async-void method cannot be observed by callers and can crash the process.
/// Existing fire-and-forget call sites still compile unchanged.
/// </remarks>
public async Task ProcessFileGroupAsync(IGrouping<string, FileInfo> fileGroup)
{
    try
    {
        await Task.Run(() => ProcessFileGroup(fileGroup));
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Convenience overload: converts schema file paths to <see cref="FileInfo"/>
/// instances and delegates to the FileInfo-based overload.
/// </summary>
public static IReadOnlyCollection<FileInfo> GenerateClass(IReadOnlyCollection<string> schemaFilePaths)
{
    try
    {
        LH.Write($"\rGenerating Class (string)\t\t\t\t\t");
        FileInfo[] schemaFiles = schemaFilePaths.Select(path => new FileInfo(path)).ToArray();
        return GenerateClass(schemaFiles);
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Asynchronous wrapper: offloads <c>GenerateSchema(string)</c> to the thread
/// pool and awaits the generated schema files.
/// </summary>
public static async Task<IReadOnlyCollection<FileInfo>> GenerateSchemaAsync(string xmlFilePath)
{
    try
    {
        return await Task.Run(() => GenerateSchema(xmlFilePath));
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Asynchronous wrapper: offloads <c>ProcessFile</c> to the thread pool.
/// </summary>
/// <remarks>
/// Changed from <c>async void</c> to <c>async Task</c>: exceptions thrown by an
/// async-void method cannot be observed by callers and can crash the process.
/// Existing fire-and-forget call sites still compile unchanged.
/// </remarks>
public async Task ProcessFileAsync(FileInfo file)
{
    try
    {
        await Task.Run(() => ProcessFile(file));
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Convenience overload: wraps the path in a <see cref="FileInfo"/> and
/// delegates to the FileInfo-based overload.
/// </summary>
public static IReadOnlyCollection<FileInfo> GenerateSchema(string xmlFilePath)
{
    try
    {
        LH.Write($"\rGenerating Schema (string): {xmlFilePath}\t\t\t\t\t");
        FileInfo xmlFile = new FileInfo(xmlFilePath);
        return GenerateSchema(xmlFile);
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Reads the entire code-page file into <c>EncodingValue</c> and logs the
/// detected encoding.
/// </summary>
public void ImportFromFile(FileInfo file)
{
    try
    {
        using (Stream stream = file.OpenRead())
        using (StreamReader reader = new StreamReader(stream))
        {
            EncodingValue = reader.ReadToEnd();
            LH.Write($"\r{file.Name.Split('.')[0]} has an encoding type of {EncodingValue}\t\t\t\t\t");
        }
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Adds a single entity to the given context and saves it.
/// </summary>
/// <param name="context">The DbContext to save through.</param>
/// <param name="data">The entity to insert.</param>
/// <returns>The number of state entries written to the database.</returns>
public async Task<int> ImportData<TContext, TData>(TContext context, TData data)
    where TContext : DbContext
    where TData : class
{
    try
    {
        // Setting the entry state to Added is sufficient to register the entity;
        // the original additionally called Set<TData>().Add(data), which
        // re-registered the same already-tracked entity and was redundant.
        context.Entry(data).State = EntityState.Added;
        return await context.SaveChangesAsync();
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Reads the projection file's well-known text into <c>WellKnownText</c> and
/// logs the resulting SRID.
/// </summary>
public void ImportFromFile(FileInfo file)
{
    try
    {
        using (StreamReader sr = new StreamReader(file.FullName))
        {
            WellKnownText = sr.ReadToEnd();
            // The explicit sr.Close() the original made here was redundant:
            // disposal via the using block already closes the reader.
        }
        LH.Write($"\r{file.Name.Split('.')[0]} has an SRID of {SRID}\t\t\t\t\t");
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Convenience overload: validates the path and delegates to the
/// DirectoryInfo-based overload.
/// </summary>
/// <param name="directoryPath">Path of the directory to process.</param>
/// <param name="fileTypes">Optional extension map, forwarded to the main overload.</param>
/// <exception cref="ArgumentException">Thrown for a null/blank path.</exception>
public void ProcessDirectory(string directoryPath, Dictionary<ImportFileType, string> fileTypes = null)
{
    try
    {
        if (string.IsNullOrWhiteSpace(directoryPath))
        {
            throw new ArgumentException(
                $"\"{directoryPath}\" is an invalid directory path.\r\nThe specified path cannot be empty or null.\r\nPlease specify a valid file path and try again.");
        }
        // BUG FIX: the original dropped the fileTypes argument here, so any
        // caller-supplied extension map was silently ignored.
        ProcessDirectory(new DirectoryInfo(directoryPath), fileTypes);
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Determines the import file type from the file name's extension suffix
/// (case-insensitive).
/// </summary>
/// <exception cref="ArgumentException">Thrown when no known extension matches.</exception>
public static ImportFileType GetImportFileType(FileInfo file)
{
    try
    {
        foreach (KeyValuePair<ImportFileType, string> type in FileTypes)
        {
            // EndsWith with OrdinalIgnoreCase avoids the two lowered string
            // allocations the original made per comparison.
            if (file.Name.EndsWith(type.Value, StringComparison.OrdinalIgnoreCase))
            {
                return type.Key;
            }
        }
        throw new ArgumentException("The file specified is not a valid import file!");
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Runs XSD.exe over a set of schema files (with the dataset argument removed)
/// and returns the generated .cs class files from the output directory.
/// </summary>
/// <param name="schemaFiles">Schema (.xsd) files to generate classes from.</param>
/// <returns>The generated C# class files matching the first schema's base name.</returns>
/// <exception cref="FileNotFoundException">First schema file missing or null.</exception>
/// <exception cref="FileLoadException">The XSD call failed.</exception>
public static IReadOnlyCollection<FileInfo> GenerateClass(IReadOnlyCollection<FileInfo> schemaFiles)
{
    try
    {
        // Build the XSD argument list, then strip the dataset flag so classes
        // (not typed DataSets) are generated.
        IReadOnlyCollection<string> args = GetXSDArguments(schemaFiles, ImportFileType.XmlSchema, true);
        List<string> cleanArgs = args.ToList();
        cleanArgs.Remove(DatasetArgument);
        args = cleanArgs.AsReadOnly();

        // The first schema's name drives both the log output and the result glob.
        FileInfo firstSchemaFile = schemaFiles.FirstOrDefault();
        if (firstSchemaFile == null || !firstSchemaFile.Exists)
            throw new FileNotFoundException("The first schema file in the collection does not exist or was not found");
        ImportFileType importType = ESRIHelper.GetImportFileType(firstSchemaFile);
        string fileExtension = ESRIHelper.GetImportFileExtension(importType);

        // Derive the base name: drop the extension, trim stray dots, and replace
        // remaining dots with underscores (XSD does the same to type names).
        // NOTE(review): Replace() removes ALL occurrences of the extension text,
        // not just the trailing one — confirm no schema names embed it mid-name.
        string fileName = firstSchemaFile.Name.Replace(fileExtension, "")
            .Trim('.')
            .Replace(".", "_")
            .Trim();
        LH.Write($"\rGenerating Classes (FileInfo): {fileName}\t\t\t\t\t");

        bool result = CallXSD(args);
        if (!result)
            throw new FileLoadException(
                $"The call to XSD failed on the files:\r\n{fileName}");

        // Collect everything XSD emitted for this base name.
        IReadOnlyCollection<FileInfo> results = OutputDirectory.GetFiles($"{fileName}*.cs", SearchOption.AllDirectories);
        int resultCount = results.Count;
        LH.Write(
            $"\r{resultCount} Class{(resultCount == 1 ? "" : "es")} Generated for {fileName}\t\t\t\t\t");
        return results;
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Looks up the file extension registered for the given import file type.
/// </summary>
/// <exception cref="ArgumentException">Thrown for an unregistered type.</exception>
public static string GetImportFileExtension(ImportFileType fileType)
{
    try
    {
        // TryGetValue: the original used the indexer, which throws
        // KeyNotFoundException for an unknown type — making its own
        // null-check/ArgumentException below unreachable.
        string value;
        if (!FileTypes.TryGetValue(fileType, out value) || value == null)
        {
            throw new ArgumentException("The file specified is not a valid import file!");
        }
        return value;
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
//public void ProcessDirectory(string directoryPath /*, Dictionary<ImportFileType, string> fileTypes = null*/) //{ // try // { // DirectoryInfo directory = new DirectoryInfo(directoryPath); // if (!directory.Exists) // throw new Exception("The specified directory does not exist!"); // SetSimpleLogging(); // LH.Write($"\r{DateTime.Now} | Processing all files in {directory.Name}\t\t\t\t\t"); // //IReadOnlyCollection<IGrouping<string, FileInfo>> fileGroups = directory.GetFiles("*", SearchOption.AllDirectories) // // .OrderBy(o => o.Name.Split('_')[3]) // // .ThenByDescending(o => o.Name.Split('_')[2].ToLowerInvariant().Equals("us")) // // .ThenByDescending(t => t.Name.Split('_')[2].Length == 2) // // .ThenByDescending(t => t.Name.Split('_')[2].Length > 2) // // .ThenBy(t => t.Name.Split('_')[3]) // // .ThenBy(t => t.Length) // // .GroupBy(g => g.Name.Split('.')[0]) // // .ToArray(); // IReadOnlyCollection<IGrouping<string, FileInfo>> fileGroups = directory.GetFiles("*", SearchOption.AllDirectories) // .OrderByDescending(o => o.Name.EndsWith(GetImportFileExtension(ImportFileType.CodePage))) // .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.Projection))) // .ThenByDescending(o => o.Name.EndsWith(GetImportFileExtension(ImportFileType.XmlFile))) // .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.XmlSchema))) // .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.Attribute))) // .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.Index))) // .ThenByDescending(t => t.Name.EndsWith(GetImportFileExtension(ImportFileType.Shape))) // // Tiger naming convention ordering // .ThenByDescending(o => o.Name.Split('_')[2].ToLowerInvariant().Equals("us")) // .ThenByDescending(t => t.Name.Split('_')[2].Length.Equals(2)) // .ThenByDescending(t => t.Name.Split('_')[2].Length > 2) // .ThenBy(t => t.Name.Split('_')[3]) // .ThenBy(t => t.Name) // .ThenBy(t => t.Length) // //.GroupBy(g 
// NOTE(review): a fully commented-out ProcessDirectory(string) implementation
// previously ended here. It ordered every file in a directory by import type and
// Tiger naming convention, grouped files by base name, and called
// ProcessFileGroup per group. Recover it from source-control history if that
// behavior is ever resurrected.

/// <summary>
/// Processes one name-grouped set of import files (e.g. a.xml / a.dbf / a.shp),
/// dispatching each member file asynchronously.
/// </summary>
public void ProcessFileGroup(IGrouping<string, FileInfo> fileGroup)
{
    try
    {
        LH.Write($"\rNow handling {fileGroup.Key} files\t\t\t\t\t");
        //Parallel.ForEach(fileGroup, ProcessFile);
        //Parallel.ForEach(fileGroup, ProcessFileAsync);
        foreach (FileInfo groupMember in fileGroup)
        {
            //ProcessFile(groupMember);
            // Group-level processing stays synchronous (immediate progress output);
            // individual files are dispatched asynchronously.
            ProcessFileAsync(groupMember);
        }
        LH.Write($"\r{DateTime.Now} | {fileGroup.Key} has finished parsing\t\t\t\t\t");
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Reads an ESRI shapefile (.shp): parses the 100-byte main file header,
/// persists this ShapeFile record via EF, then bulk-copies every parsed shape
/// record into the Shape table.
/// </summary>
/// <exception cref="FileLoadException">
/// Thrown when the ShapeFile record fails to save or has no valid Id.
/// </exception>
public void ImportFromFile(FileInfo file)
{
    try
    {
        // TODO: Delete all records that pertain to this file
        using (BinaryReader br = new BinaryReader(file.OpenRead()))
        {
            long streamLength = br.BaseStream.Length;
            LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));

            // --- Main file header ---
            FileCode = NumericsHelper.ReverseInt(br.ReadInt32()); // stored big-endian
            for (int i = 0; i < 5; i++)
            {
                br.ReadInt32(); // Skip 5 empty Integer (4-byte) slots
            }
            ContentLength = NumericsHelper.ReverseInt(br.ReadInt32()); // Big Endian, Reverse for actual value
            FileVersion = br.ReadInt32();
            ShapeType = (ShapeType)br.ReadInt32();
            XMin = br.ReadDouble();
            YMin = br.ReadDouble();
            XMax = br.ReadDouble();
            YMax = br.ReadDouble();
            ZMin = br.ReadDouble();
            ZMax = br.ReadDouble();
            MMin = br.ReadDouble();
            MMax = br.ReadDouble();

            // Persist the header record so shapes can reference its Id.
            int rowsAffected;
            using (ShapefileEntities db = new ShapefileEntities())
            {
                db.Entry(this).State = EntityState.Added;
                rowsAffected = db.SaveChanges();
            }

            if (rowsAffected > 0 && Id > 0)
            {
                // --- Shape records: read until end of stream ---
                List<Shape> shapes = new List<Shape>();
                while (br.PeekChar() > -1)
                {
                    LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));
                    shapes.Add(new Shape(Id, ShapeType, br));
                }
                LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));

                using (SqlBulkCopy sbc = new SqlBulkCopy(DataHelper.DefaultConnectionString))
                {
                    sbc.BatchSize = DataHelper.DefaultBatchSize;
                    sbc.BulkCopyTimeout = DataHelper.DefaultTimeoutSeconds;
                    sbc.DestinationTableName = "Shape";
                    sbc.EnableStreaming = true;
                    sbc.SqlRowsCopied += DataHelper.SqlBulkCopy_SqlRowsCopied;
                    sbc.NotifyAfter = 250;
                    sbc.ColumnMappings.Add("ShapeFileId", "ShapeFileId");
                    sbc.ColumnMappings.Add("ShapeType", "ShapeType");
                    sbc.ColumnMappings.Add("RecordNumber", "RecordNumber");
                    sbc.ColumnMappings.Add("ContentLength", "ContentLength");
                    sbc.ColumnMappings.Add("XMin", "XMin");
                    sbc.ColumnMappings.Add("YMin", "YMin");
                    sbc.ColumnMappings.Add("XMax", "XMax");
                    sbc.ColumnMappings.Add("YMax", "YMax");
                    sbc.ColumnMappings.Add("ZMin", "ZMin");
                    sbc.ColumnMappings.Add("ZMax", "ZMax");
                    sbc.ColumnMappings.Add("MMin", "MMin");
                    sbc.ColumnMappings.Add("MMax", "MMax");
                    sbc.ColumnMappings.Add("NumberOfParts", "NumberOfParts");
                    sbc.ColumnMappings.Add("NumberOfPoints", "NumberOfPoints");
                    sbc.ColumnMappings.Add("DTGeometry", "Geometry");
                    try
                    {
                        DataTable shapesData = DataHelper.CreateDataTable(shapes);
                        // BUG FIX: the original called WriteToServerAsync without
                        // awaiting it, so the finally block (and dispose) could
                        // close the bulk copy while the write was still running.
                        sbc.WriteToServer(shapesData);
                    }
                    catch (Exception e)
                    {
                        LH.Error($"\r\n{e.Message}\r\n{e}");
                        throw;
                    }
                    finally
                    {
                        sbc.Close();
                    }
                }
            }
            else
            {
                throw new FileLoadException("The ShapeFile record failed to save properly or doesn't have a valid ID");
            }
        }
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Exploratory import of an ESRI metadata XML file: loads it as an XmlDocument,
/// round-trips it through JSON, then walks every node with an XmlReader,
/// dumping node and attribute details to the console. Ends in Debugger.Break()
/// — this method is still diagnostic scaffolding, not production import logic.
/// </summary>
public void ImportFromFile(FileInfo file)
{
    try
    {
        //XmlSerializer serializer = new XmlSerializer(typeof(metadata));
        //metadata data = (metadata)serializer.Deserialize(file.OpenRead());
        //foreach (object item in data.Items)
        //{
        //    Type itemType = item.GetType();
        //    Console.WriteLine($"{itemType} | {item}");
        //}
        XmlDocument document = new XmlDocument();
        document.Load(file.FullName);
        string jsonTest = JsonConvert.SerializeXmlNode(document);
        object jsonObject = JsonConvert.DeserializeObject(jsonTest); // kept for debugger inspection
        //Console.WriteLine(jsonTest);

        // BUG FIX: the XmlReader was never disposed; wrap it in a using block so
        // the underlying file handle is released even when an exception is thrown.
        using (XmlReader reader = XmlReader.Create(file.FullName))
        {
            while (reader.Read())
            {
                // Snapshot every reader property for inspection in the debugger.
                string name = reader.Name;
                string baseUri = reader.BaseURI;
                string localName = reader.LocalName;
                string namespaceUri = reader.NamespaceURI;
                XmlNodeType nodeType = reader.NodeType;
                string prefix = reader.Prefix;
                IXmlSchemaInfo schemaInfo = reader.SchemaInfo;
                string value = reader.Value;
                bool hasAttributes = reader.HasAttributes;
                int attributeCount = reader.AttributeCount;
                bool canReadBinaryContent = reader.CanReadBinaryContent;
                bool canReadValueChunk = reader.CanReadValueChunk;
                bool canResolveEntity = reader.CanResolveEntity;
                int depth = reader.Depth;
                bool isEoF = reader.EOF;
                bool hasValue = reader.HasValue;
                bool isDefault = reader.IsDefault;
                bool isEmptyElement = reader.IsEmptyElement;
                XmlNameTable nameTable = reader.NameTable;
                char quoteChar = reader.QuoteChar;
                ReadState readState = reader.ReadState;
                XmlReaderSettings settings = reader.Settings;
                Type valueType = reader.ValueType;
                string xmlLang = reader.XmlLang;
                XmlSpace xmlSpace = reader.XmlSpace;

                switch (reader.NodeType)
                {
                    case XmlNodeType.XmlDeclaration:
                    case XmlNodeType.Whitespace:
                    case XmlNodeType.EndElement:
                    case XmlNodeType.Comment:
                        continue; // structural noise — skip without logging
                    //case XmlNodeType.Element:
                    //case XmlNodeType.Text:
                    //case XmlNodeType.Attribute:
                    //case XmlNodeType.CDATA:
                    //case XmlNodeType.EntityReference:
                    //case XmlNodeType.Entity:
                    //case XmlNodeType.ProcessingInstruction:
                    //case XmlNodeType.Document:
                    //case XmlNodeType.DocumentType:
                    //case XmlNodeType.DocumentFragment:
                    //case XmlNodeType.Notation:
                    //case XmlNodeType.SignificantWhitespace:
                    //case XmlNodeType.EndEntity:
                    //    break;
                    case XmlNodeType.None:
                        Console.WriteLine("Why does this node have a type of 'None'?");
                        break;
                    default:
                        Console.WriteLine($"\r{nodeType} | {name} | {value}");
                        break;
                }

                if (reader.HasAttributes)
                {
                    Dictionary<string, string> results = new Dictionary<string, string>();
                    while (reader.MoveToNextAttribute())
                    {
                        results.Add(reader.Name, reader.Value);
                    }
                    foreach (KeyValuePair<string, string> result in results)
                    {
                        Console.WriteLine($"{result.Key} | {result.Value}");
                    }
                }
            }
        }
        Debugger.Break();
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Loads the raw contents of a DBF attribute file into a DataSet via
/// DataHelper. The loaded data is not yet transformed into usable records.
/// </summary>
public void ImportFromFile(FileInfo file)
{
    try
    {
        // DataHelper.FillDataSetFromDbf requires a ref parameter.
        DataSet rawResults = new DataSet();
        DataHelper.FillDataSetFromDbf(file, ref rawResults);
        // TODO: PROCESS THE DATASET INTO USABLE DATA
        // NOTE(review): a large commented-out diagnostic loop that dumped every
        // table, column, and row property of rawResults previously lived here;
        // recover it from source-control history if that dump is needed again.
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Reads an ESRI shapefile index (.shx): parses the 100-byte header, persists
/// this index-file record via EF, then bulk-copies every (offset, length)
/// record into the ShapeIndex table.
/// </summary>
/// <exception cref="Exception">
/// Thrown when the index file record fails to save or has no valid Id.
/// </exception>
public void ImportFromFile(FileInfo file)
{
    try
    {
        using (BinaryReader br = new BinaryReader(file.OpenRead()))
        {
            long streamLength = br.BaseStream.Length;

            // --- Main file header (identical layout to the .shp header) ---
            FileCode = NumericsHelper.ReverseInt(br.ReadInt32()); // stored big-endian
            for (int i = 0; i < 5; i++)
            {
                br.ReadInt32(); // Skip 5 empty Integer (4-byte) slots
            }
            ContentLength = NumericsHelper.ReverseInt(br.ReadInt32()); // Big Endian, Reverse for actual value
            FileVersion = br.ReadInt32();
            ShapeType = (ShapeType)br.ReadInt32();
            XMin = br.ReadDouble();
            YMin = br.ReadDouble();
            XMax = br.ReadDouble();
            YMax = br.ReadDouble();
            ZMin = br.ReadDouble();
            ZMax = br.ReadDouble();
            MMin = br.ReadDouble();
            MMax = br.ReadDouble();

            int rowsAffected;
            using (ShapefileEntities db = new ShapefileEntities())
            {
                db.Entry(this).State = EntityState.Added;
                rowsAffected = db.SaveChanges();
            }

            // Simplified from the original double-negated !(x > 0) form.
            if (rowsAffected <= 0 || Id <= 0)
            {
                throw new Exception(
                    "The index file was not added to the database properly. No ID is present to assign to the child index records. Unable to proceed!");
            }

            // --- Index records: one (offset, content length) pair per shape ---
            List<ShapeIndex> shapeIndices = new List<ShapeIndex>();
            int counter = 0;
            while (br.PeekChar() > -1)
            {
                LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));
                shapeIndices.Add(new ShapeIndex
                {
                    IndexFileId = Id,
                    RecordNumber = ++counter,
                    Offset = NumericsHelper.ReverseInt(br.ReadInt32()),
                    ContentLength = NumericsHelper.ReverseInt(br.ReadInt32())
                });
            }
            LH.Write(StringHelper.GetProgressString(br.BaseStream.Position, streamLength, file.Name));

            using (SqlBulkCopy sbc = new SqlBulkCopy(DataHelper.DefaultConnectionString))
            {
                sbc.BatchSize = DataHelper.DefaultBatchSize;
                sbc.BulkCopyTimeout = DataHelper.DefaultTimeoutSeconds;
                sbc.DestinationTableName = "ShapeIndex";
                sbc.EnableStreaming = true;
                sbc.SqlRowsCopied += DataHelper.SqlBulkCopy_SqlRowsCopied;
                sbc.NotifyAfter = DataHelper.DefaultBatchSize;
                sbc.ColumnMappings.Add("Id", "Id");
                sbc.ColumnMappings.Add("IndexFileId", "IndexFileId");
                sbc.ColumnMappings.Add("RecordNumber", "RecordNumber");
                sbc.ColumnMappings.Add("Offset", "Offset");
                sbc.ColumnMappings.Add("ContentLength", "ContentLength");
                try
                {
                    DataTable shapeIndicesData = DataHelper.CreateDataTable(shapeIndices);
                    // BUG FIX: the original called WriteToServerAsync without
                    // awaiting it, so the finally block (and dispose) could close
                    // the bulk copy while the write was still running.
                    sbc.WriteToServer(shapeIndicesData);
                }
                catch (Exception e)
                {
                    LH.Error($"\r\n{e.Message}\r\n{e}");
                    throw;
                }
                finally
                {
                    sbc.Close();
                }
            }
        }
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}
/// <summary>
/// Dispatches a single import file to the appropriate handler based on its
/// extension. Several branches are intentionally disabled (commented out)
/// while their handlers are under development.
/// </summary>
/// <exception cref="FileNotFoundException">Thrown for a null or missing file.</exception>
/// <exception cref="NotImplementedException">
/// Thrown for geocoding indexes and unsupported extensions.
/// </exception>
public void ProcessFile(FileInfo file)
{
    try
    {
        if (file == null || !file.Exists)
        {
            throw new FileNotFoundException("Please specify a valid file and try the ProcessFile method again.");
        }
        LH.Write($"\rProcessing {file.Name}\t\t\t\t\t");
        ImportFileType importFileType = GetImportFileType(file);
        string fileExtension = GetImportFileExtension(importFileType);
        //LH.Write($"\r{file.Name} is a(n) {importFileType} file with an extension of {fileExtension}\t\t\t\t\t");
        switch (importFileType)
        {
            case ImportFileType.CodePage:
                //CodePageFile codePageFile = new CodePageFile(file);
                break;
            case ImportFileType.XmlFile:
                // Construction triggers the import; the local is unused otherwise.
                MetadataFile metadataFile = new MetadataFile(file);
                break;
            case ImportFileType.Projection:
                ProjectionFile projectionFile = new ProjectionFile(file);
                break;
            case ImportFileType.Attribute:
                //AttributeFile attributeFile = new AttributeFile(file);
                break;
            case ImportFileType.Index:
                //IndexFile indexFile = new IndexFile(file);
                break;
            case ImportFileType.Shape:
                //ShapeFile shapeFile = new ShapeFile(file);
                //LH.Write($"\rShapeFile Inserted: {shapeFile.Name}\t\t\t\t\t");
                break;
            case ImportFileType.GeocodingIndex:
            case ImportFileType.ODBGeocodingIndex:
                throw new NotImplementedException(
                    "We currently do not handle the processing of Geocoding Indexes or ODB Geocoding indexes. (We make our own in SQL Server)");
            case ImportFileType.XmlSchema:
                // Message fixed: was "it's own schema XML schema documentation".
                LH.WriteLine(
                    $"\rNo data is contained within {file.Name}. This application generates and utilizes its own XML schema documentation. No actions performed with this file.");
                break;
            default:
                throw new NotImplementedException(
                    $"{file.Extension} is not a supported file type. Extensions must match the ESRI Shapefile specifications.");
        }
    }
    catch (Exception e)
    {
        LH.Error($"\r\n{e.Message}\r\n{e}");
        throw;
    }
}