Example #1
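
        // Reads the DBF attribute records for the [Start, End) range described by the
        // ParseDataTaskState and copies the decoded values into each MapShapeModel's Attributes.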
        private static Task ParseAttributes(object state)
        {
            var taskState = (ParseDataTaskState)state;
            var cancellationTokenSource = taskState.CancellationTokenSource;
            var token = cancellationTokenSource.Token;

            var task = Task.Factory.StartNew(
                async () =>
            {
                using (var dataStream = taskState.Stream.CloneStream())
                {
                    int start            = taskState.Start;
                    int end              = taskState.End;
                    DbfHeader dbfHeader  = taskState.DbfHeader;
                    var shapeModels      = taskState.ShapeModels;
                    var valueConverter   = taskState.ValueConverter;
                    var attributesToLoad = taskState.AttributesToLoad;

                    dataStream.Seek((ulong)(dbfHeader.RecordsOffset + start * dbfHeader.RecordLength));

                    for (int i = start; i < end; i++)
                    {
                        if (cancellationTokenSource.IsCancellationRequested)
                        {
                            return;
                        }

                        var shapeModel = shapeModels[i] as MapShapeModel;
                        byte[] record  = new byte[dbfHeader.RecordLength];
                        await dataStream.ReadAsync(record.AsBuffer(), (uint)dbfHeader.RecordLength, InputStreamOptions.Partial);

                        // Data records are preceded by one byte; that is, a space (20H) if the record is not deleted, an asterisk (2AH) if the record is deleted.
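                        // The flag itself is not inspected here; a stricter reader could skip
                        // deleted rows, e.g. if (record[0] == 0x2A) { continue; }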
                        int offset = 1;
                        foreach (var field in dbfHeader.Fields)
                        {
                            if (attributesToLoad == null || attributesToLoad.Contains(field.Name))
                            {
                                string value         = dbfHeader.Encoding.GetString(record, offset, field.Length);
                                object propertyValue = TransformDbfValue(field, value, valueConverter);

                                shapeModel.Attributes[field.Name] = propertyValue;
                            }

                            offset += field.Length;
                        }
                    }
                }
            },
                token).Unwrap();

            return task;
        }
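
        // Reads the fixed 32-byte DBF file header plus the field descriptor array and returns
        // the resulting DbfHeader (record count, record offset/length, encoding and fields).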
        private static Task<DbfHeader> BuildDbfHeaderData(IRandomAccessStream stream, Encoding encoding, CancellationTokenSource cancellationTokenSource)
        {
            var token = cancellationTokenSource.Token;

            var task = Task.Factory.StartNew(async () =>
            {
                if (cancellationTokenSource.IsCancellationRequested)
                {
                    return null;
                }

                if (stream.Size < 32)
                {
                    throw new NotSupportedException(InvalidFormat);
                }

                using (var dataStream = stream.CloneStream())
                {
                    byte[] header = new byte[32];
                    await dataStream.ReadAsync(header.AsBuffer(), 32u, InputStreamOptions.Partial);

                    byte fileType = header[0];
                    if (!AllowedTypes.Contains(fileType))
                    {
                        throw new NotSupportedException(InvalidFormat);
                    }

                    DbfHeader dbfHeader     = new DbfHeader();
                    dbfHeader.RecordsCount  = BitConverter.ToInt32(header, 4);
                    dbfHeader.RecordsOffset = BitConverter.ToInt16(header, 8);
                    dbfHeader.RecordLength  = BitConverter.ToInt16(header, 10);

                    if (encoding == null)
                    {
                        byte languageDriver = header[29];
                        encoding            = DbfEncoding.GetEncoding(languageDriver);
                    }

                    dbfHeader.Encoding = encoding;

                    // header is 32 bytes + n field descriptors * 32 bytes + carriage return byte (0x0D)
                    int fieldDescriptorCount = (dbfHeader.RecordsOffset - 32 - 1) / 32;
                    byte[] fieldDescriptor;
                    DbfFieldInfo dbfField;
                    for (int i = 0; i < fieldDescriptorCount; i++)
                    {
                        if (cancellationTokenSource.IsCancellationRequested)
                        {
                            return null;
                        }

                        fieldDescriptor = new byte[32];
                        await dataStream.ReadAsync(fieldDescriptor.AsBuffer(), 32u, InputStreamOptions.Partial);

                        dbfField               = new DbfFieldInfo();
                        dbfField.Name          = encoding.GetString(fieldDescriptor, 0, 11).Replace("\0", string.Empty);
                        dbfField.NativeDbfType = (char)fieldDescriptor[11];

                        dbfField.Length       = fieldDescriptor[16];
                        dbfField.DecimalCount = fieldDescriptor[17];

                        dbfHeader.Fields.Add(dbfField);
                    }

                    return dbfHeader;
                }
            },
                token).Unwrap();

            return task;
        }
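
        // Splits the DBF records into roughly equal ranges, one per available processor,
        // parses each range on its own task via ParseAttributes and blocks until all finish.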
        private MapShapeModelCollection ProcessDataItems(MapShapeModelCollection shapeModels, DbfHeader dbfHeader, IRandomAccessStream dataStream, CancellationTokenSource cancellationTokenSource)
        {
            if (cancellationTokenSource.IsCancellationRequested)
            {
                return null;
            }

            var token = cancellationTokenSource.Token;

            int itemCount = dbfHeader.RecordsCount;
            int maxDegreeOfParallelism = Environment.ProcessorCount;

            if (itemCount < maxDegreeOfParallelism)
            {
                maxDegreeOfParallelism = itemCount;
            }

            int remainder  = itemCount % maxDegreeOfParallelism;
            int multiplier = (itemCount / maxDegreeOfParallelism) + (remainder > 0 ? 1 : 0);

            List<Task> tasks = new List<Task>();

            for (int i = 0; i < maxDegreeOfParallelism; i++)
            {
                int start = i * multiplier;
                int end   = Math.Min((i + 1) * multiplier, itemCount);

                var taskState = new ParseDataTaskState()
                {
                    Start                   = start,
                    End                     = end,
                    DbfHeader               = dbfHeader,
                    Stream                  = dataStream,
                    ShapeModels             = shapeModels,
                    CancellationTokenSource = cancellationTokenSource,
                    ValueConverter          = this.AttributeValueConverter,
                    AttributesToLoad        = this.AttributesToLoad
                };
                var task = Task.Factory.StartNew<Task>(ParseAttributes, taskState, token, TaskCreationOptions.AttachedToParent, TaskScheduler.Current).Unwrap();

                tasks.Add(task);
            }

            Task.WaitAll(tasks.ToArray());

            return shapeModels;
        }
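
Below is a minimal sketch of how the three members above could be wired together from inside the same class. The glue method ReadAttributesAsync and its call shape are hypothetical; only BuildDbfHeaderData, ProcessDataItems and their parameter types come from the sample itself.

        // Hypothetical glue method, assuming it lives in the same class as the members above.
        private async Task<MapShapeModelCollection> ReadAttributesAsync(
            MapShapeModelCollection shapeModels,
            IRandomAccessStream dbfStream,
            Encoding encoding,
            CancellationTokenSource cancellationTokenSource)
        {
            // Parse the 32-byte header and the field descriptors first...
            DbfHeader dbfHeader = await BuildDbfHeaderData(dbfStream, encoding, cancellationTokenSource);

            if (dbfHeader == null)
            {
                // BuildDbfHeaderData returns null when cancellation was requested.
                return null;
            }

            // ...then fan the record parsing out across the available processors.
            return this.ProcessDataItems(shapeModels, dbfHeader, dbfStream, cancellationTokenSource);
        }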