public ActionResult ChunkSave(IEnumerable<HttpPostedFileBase> files, string metaData)
{
    if (metaData == null)
    {
        return Save(files, "");
    }

    MemoryStream ms = new MemoryStream(Encoding.UTF8.GetBytes(metaData));
    var serializer = new DataContractJsonSerializer(typeof(ChunkMetaData));
    ChunkMetaData somemetaData = serializer.ReadObject(ms) as ChunkMetaData;
    string path = String.Empty;

    // The Name of the Upload component is "files"
    if (files != null)
    {
        foreach (var file in files)
        {
            //path = Path.Combine(Server.MapPath("~/App_Data"), somemetaData.FileName);
            //AppendToFile(path, file.InputStream);
        }
    }

    FileResult fileBlob = new FileResult();
    fileBlob.uploaded = somemetaData.TotalChunks - 1 <= somemetaData.ChunkIndex;
    fileBlob.fileUid = somemetaData.UploadUid;
    return Json(fileBlob);
}
public ActionResult Upload(string metaData)
{
    if (Request.Files == null || Request.Files.Count < 1)
    {
        return new EmptyResult();
    }

    ChunkMetaData chunkData = null;
    if (metaData != null)
    {
        var serializer = new DataContractJsonSerializer(typeof(ChunkMetaData));
        var ms = new MemoryStream(Encoding.UTF8.GetBytes(metaData));
        chunkData = serializer.ReadObject(ms) as ChunkMetaData;
    }

    var uploadedFiles = storageService.SaveToTemp(Request.Files, chunkData);
    var uploaded = chunkData == null || chunkData.TotalChunks - 1 <= chunkData.ChunkIndex;

    return Json(
        new
        {
            uploaded,
            fileUid = chunkData?.UploadUid,
            files = uploaded ? uploadedFiles : null
        },
        JsonRequestBehavior.AllowGet);
}
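None of these snippets shows the ChunkMetaData contract they deserialize into. Below is a plausible sketch inferred from the properties the handlers read (UploadUid, FileName, ChunkIndex, TotalChunks); the member names, JSON keys, and the FileResult POCO are assumptions, not taken from the original sources, and other snippets further down clearly use their own variants (FileExtension, IsFinalChunck, IsUploaded). Note that the FileResult here is a custom POCO returned as JSON, which only shares a name with the built-in MVC FileResult.

using System.Runtime.Serialization;

// Hypothetical chunk-metadata contract, inferred from usage in the handlers above.
[DataContract]
public class ChunkMetaData
{
    [DataMember(Name = "uploadUid")]
    public string UploadUid { get; set; }    // identifies one logical upload across all its chunks

    [DataMember(Name = "fileName")]
    public string FileName { get; set; }

    [DataMember(Name = "contentType")]
    public string ContentType { get; set; }

    [DataMember(Name = "chunkIndex")]
    public long ChunkIndex { get; set; }     // zero-based index of the current chunk

    [DataMember(Name = "totalChunks")]
    public long TotalChunks { get; set; }

    [DataMember(Name = "totalFileSize")]
    public long TotalFileSize { get; set; }
}

// Hypothetical response type matching the fields ChunkSave assigns.
public class FileResult
{
    public bool uploaded { get; set; }   // true once the final chunk has arrived
    public string fileUid { get; set; }
}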
public ActionResult ChunkSave(List<IFormFile> files, string metaData)
{
    if (metaData == null)
    {
        return Save(files);
    }

    MemoryStream ms = new MemoryStream(Encoding.UTF8.GetBytes(metaData));
    var serializer = new DataContractJsonSerializer(typeof(ChunkMetaData));
    ChunkMetaData somemetaData = serializer.ReadObject(ms) as ChunkMetaData;
    string path = String.Empty;

    // The Name of the Upload component is "files"
    if (files != null)
    {
        foreach (var file in files)
        {
            //path = Path.Combine(_webHhostingEnvironment.WebRootPath, "Upload_Directory", somemetaData.FileName);
            //AppendToFile(path, file.OpenReadStream()); // IFormFile exposes OpenReadStream(), not InputStream
        }
    }

    // Return an empty string to signify success
    return Content("");
}
List<List<ChunkMetaData>> DecodePredictionMessage(byte[] buffer, int offset, out uint decodedOffsetInStream)
{
    // Wire layout: [uint offsetInStream][uint chainsListSize],
    // then per chain: [uint chunkListSize], then per chunk: [byte hint][long chunk].
    uint buffer_idx = (uint)offset;
    uint chainsListSize;
    List<List<ChunkMetaData>> chainsList;

    buffer_idx += ByteArrayScalarTypeConversionLib.ByteArrayScalarTypeConversionLib.ByteArray2Uint(buffer, buffer_idx, out decodedOffsetInStream);
    buffer_idx += ByteArrayScalarTypeConversionLib.ByteArrayScalarTypeConversionLib.ByteArray2Uint(buffer, buffer_idx, out chainsListSize);

    chainsList = new List<List<ChunkMetaData>>((int)chainsListSize);
    for (int chain_idx = 0; chain_idx < chainsListSize; chain_idx++)
    {
        uint chunkListSize;
        buffer_idx += ByteArrayScalarTypeConversionLib.ByteArrayScalarTypeConversionLib.ByteArray2Uint(buffer, buffer_idx, out chunkListSize);

        List<ChunkMetaData> chunkMetaDataList = new List<ChunkMetaData>((int)chunkListSize);
        for (uint idx = 0; idx < chunkListSize; idx++)
        {
            ChunkMetaData chunkMetaData = new ChunkMetaData();
            chunkMetaData.hint = buffer[buffer_idx++];
            buffer_idx += ByteArrayScalarTypeConversionLib.ByteArrayScalarTypeConversionLib.ByteArray2Long(buffer, buffer_idx, out chunkMetaData.chunk);
            chunkMetaDataList.Add(chunkMetaData);
        }
        chainsList.Add(chunkMetaDataList);
    }

    return chainsList;
}
public async Task<IActionResult> Upload(IEnumerable<IFormFile> files, [FromForm] string metaData)
{
    if (!Request.ContentType.Contains("multipart/form-data"))
    {
        return BadRequest("Content must be mime multipart.");
    }

    var user = await _modelDB.Users.FindAsync(Guid.Parse(User.Claims.Where(x => x.Type == ClaimTypes.NameIdentifier).Select(x => x.Value).FirstOrDefault()));

    ChunkMetaData metadata = Newtonsoft.Json.JsonConvert.DeserializeObject<ChunkMetaData>(Request.Form["metadata"]);
    await _fileService.WriteToStreamAsync(files.FirstOrDefault(), metadata);

    // Keep accepting chunks until the final one arrives.
    if (!metadata.IsFinalChunck)
    {
        return Json(new UploadChuckResult { fileUid = metadata.UploadUid, uploaded = metadata.IsFinalChunck });
    }

    Guid itemID;
    if (!Guid.TryParse(Request.Form["ItemID"], out itemID))
    {
        return BadRequest("The ID of the owning entity for the document was not specified.");
    }

    var document = await _fileService.SaveDocumentToDatabaseAsync(_modelDB, metadata, itemID, user.ID);
    await _fileService.FinalizeUploadAsync(document, metadata);

    return Json(new UploadChuckResult { fileUid = metadata.UploadUid, uploaded = metadata.IsFinalChunck });
}
private ChunkMetaData CreateChunkMetaData(ChunkColumnInfo[] columnsInfo)
{
    ChunkMetaData metaData = new ChunkMetaData();

    //Compute the Master Biome for the chunk (the most frequent biome among its columns).
    metaData.ChunkMasterBiomeType = columnsInfo.GroupBy(item => item.Biome).OrderByDescending(x => x.Count()).First().Key;

    return metaData;
}
public ChunkMetaData()
{
    this.chunkSize = 16;
    this.blockSize = 0.5f;
    this.halfBlockSize = 0.25f; // blockSize / 2
    this.blockSizeDiv = 2;      // 1 / blockSize
    this.depth = 4;

    Instance = this;            // exposes this instance as a globally reachable singleton
}
void Start()
{
    this.spawner = this.GetComponent<Spawner>();
    this.chunkMetaData = this.GetComponent<World>().chunkMetaData;
    this.len = this.chunkMetaData.blockMetaData.Count;
    this.currentSelectedBlockIndex = 0;
    this.flag = true;
}
/// <summary>
/// Populates the chunk with various resources.
/// </summary>
/// <param name="chunk">The generated chunk to populate.</param>
/// <param name="chunkData">Raw block data for the chunk.</param>
/// <param name="chunkMetaData">Metadata describing the chunk, including its master biome.</param>
/// <param name="chunkRnd">Deterministic random source for this chunk.</param>
/// <param name="entityFactory">Factory used to create spawned entities.</param>
/// <param name="landscapeEntities">Landscape entities to insert into the chunk.</param>
private void PopulateChunk(GeneratedChunk chunk, byte[] chunkData, ChunkMetaData chunkMetaData, FastRandom chunkRnd, EntityFactory entityFactory, List<LandscapeEntity> landscapeEntities)
{
    //Get Chunk Master Biome
    var masterBiome = _config.ProcessorParam.Biomes[chunkMetaData.ChunkMasterBiomeType];

    ByteChunkCursor dataCursor = new ByteChunkCursor(chunkData, chunk.BlockData.ColumnsInfo);

    masterBiome.GenerateChunkCaverns(dataCursor, chunkRnd);
    masterBiome.GenerateChunkResources(dataCursor, chunkRnd);
    chunkMetaData.InitialSpawnableEntitiesAmount = masterBiome.GenerateChunkStaticItems(dataCursor, chunk, masterBiome, chunkRnd, entityFactory, _spawnControler);

    InsertMicrolandscapeStaticEntities(dataCursor, chunk, chunkRnd, entityFactory, landscapeEntities);
}
List<ChunkMetaData> DecodePredictionAckMessage(byte[] buffer, int offset, out uint chunksCount)
{
    uint buffer_idx = (uint)offset;
    List<ChunkMetaData> chunkMetaDataAndId;

    buffer_idx += ByteArrayScalarTypeConversionLib.ByteArrayScalarTypeConversionLib.ByteArray2Uint(buffer, buffer_idx, out chunksCount);

    chunkMetaDataAndId = new List<ChunkMetaData>((int)chunksCount);
    for (int idx = 0; idx < chunksCount; idx++)
    {
        ChunkMetaData chunkMetaData = new ChunkMetaData();
        chunkMetaData.hint = buffer[buffer_idx++];
        buffer_idx += ByteArrayScalarTypeConversionLib.ByteArrayScalarTypeConversionLib.ByteArray2Long(buffer, buffer_idx, out chunkMetaData.chunk);
        chunkMetaDataAndId.Add(chunkMetaData);
    }

    return chunkMetaDataAndId;
}
public FileResult ChunkUploadSave(IEnumerable<HttpPostedFileBase> files, string metaData, string uniqueId)
{
    ChunkMetaData chunkMetaData = JsonConvert.DeserializeObject<ChunkMetaData>(metaData);
    string extension = Path.GetExtension(chunkMetaData.FileName);
    string fullPath = Path.Combine(ConfigurationHelper.GetAppDataPath(), string.Concat(uniqueId, extension));

    if (files != null)
    {
        foreach (HttpPostedFileBase file in files)
        {
            AppendToFile(fullPath, file.InputStream);
        }
    }

    return new FileResult()
    {
        Uploaded = chunkMetaData.IsUploaded,
        FileUid = chunkMetaData.UploadUid,
        FileName = fullPath
    };
}
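Several of these handlers (ChunkSave, ChunkUploadSave, SaveToTemp) call an AppendToFile helper that none of the snippets define. A minimal sketch, under the assumption that it simply appends the incoming chunk stream to the target file (uses System.IO):

private static void AppendToFile(string fullPath, Stream content)
{
    // FileMode.Append creates the file on the first chunk and
    // appends each later chunk at the end of the same file.
    using (var stream = new FileStream(fullPath, FileMode.Append, FileAccess.Write, FileShare.ReadWrite))
    {
        content.CopyTo(stream);
    }
}

Because FileMode.Append covers both the create and append cases, the callers can run the same code path for every chunk of an upload.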
public TextureLoader(ChunkMetaData chunkMetaData)
{
    this.tiling = 1.0f / chunkMetaData.textureSize;
    this.delta = this.tiling * (chunkMetaData.delta / 100.0f);
    this.blockTexture = new List<TextureRect[]>();

    int len = chunkMetaData.blockMetaData.Count;
    for (int i = 0; i < len; i++)
    {
        TextureRect[] rect = new TextureRect[6];
        rect[0] = this.GetTextureRect(chunkMetaData.blockMetaData[i].forward);
        rect[1] = this.GetTextureRect(chunkMetaData.blockMetaData[i].back);
        rect[2] = this.GetTextureRect(chunkMetaData.blockMetaData[i].top);
        rect[3] = this.GetTextureRect(chunkMetaData.blockMetaData[i].down);
        rect[4] = this.GetTextureRect(chunkMetaData.blockMetaData[i].left);
        rect[5] = this.GetTextureRect(chunkMetaData.blockMetaData[i].right);
        this.blockTexture.Add(rect);
    }
}
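GetTextureRect itself is not among these snippets. A hypothetical sketch, assuming tile indices address a row-major grid on a square atlas (so tiling = 1 / textureSize) and that delta insets each rect to prevent bleeding between adjacent tiles; the TextureRect constructor signature here is invented for illustration:

private TextureRect GetTextureRect(int tileIndex)
{
    int tilesPerRow = Mathf.RoundToInt(1.0f / this.tiling);
    float u = (tileIndex % tilesPerRow) * this.tiling;
    float v = (tileIndex / tilesPerRow) * this.tiling;

    // Shrink the rect by delta on every edge so neighboring atlas tiles don't bleed in.
    return new TextureRect(u + this.delta, v + this.delta,
                           this.tiling - 2 * this.delta, this.tiling - 2 * this.delta);
}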
public async Task<IActionResult> Upload(IEnumerable<IFormFile> files, [FromForm] string metaData)
{
    if (User.Identity.IsAuthenticated == false)
    {
        return BadRequest(new Models.ApiErrorResult("User must be authenticated to view measures."));
    }
    if (!Request.ContentType.Contains("multipart/form-data"))
    {
        return BadRequest(new Models.ApiErrorResult("Content must be mime multipart."));
    }
    if (!User.Claims.Any(cl => cl.Type == Identity.Claims.SubmitMeasure_Key))
    {
        return BadRequest(new Models.ApiErrorResult("The user does not have permission to submit measures."));
    }

    ChunkMetaData metadata = Newtonsoft.Json.JsonConvert.DeserializeObject<ChunkMetaData>(Request.Form["metadata"]);
    if (!metadata.FileExtension.EndsWith("xlsx", StringComparison.OrdinalIgnoreCase) && !metadata.FileExtension.EndsWith("json", StringComparison.OrdinalIgnoreCase))
    {
        return BadRequest(new Models.ApiErrorResult("Only Excel and json files are valid."));
    }

    var user = await _modelDB.Users.FindAsync(Guid.Parse(User.Claims.Where(x => x.Type == ClaimTypes.NameIdentifier).Select(x => x.Value).FirstOrDefault()));

    await _fileService.WriteToStreamAsync(files.FirstOrDefault(), metadata);

    if (!metadata.IsFinalChunck)
    {
        return Ok(new UploadResult(metadata.UploadUid, metadata.IsFinalChunck));
    }

    List<string> errors = new List<string>();
    string metricName = null;
    Guid? metricID = null;

    try
    {
        DQM.Models.MeasureSubmissionViewModel measure = null;

        if (metadata.FileExtension.EndsWith("xlsx", StringComparison.OrdinalIgnoreCase))
        {
            using (var stream = _fileService.ReturnTempFileStream(metadata.UploadUid))
            using (var document = SpreadsheetDocument.Open(stream, false))
            {
                var reader = new ASPE.DQM.Utils.MeasuresExcelReader(document);
                measure = reader.Convert(errors);
                document.Close();
            }

            //Can delete the excel file regardless of validation, will be saved as json if successful
            await _fileService.DeleteTempFileChunkAsync(metadata.UploadUid);

            if (errors.Count > 0)
            {
                return BadRequest(new UploadResult(metadata.UploadUid, true, errors.ToArray()));
            }

            //validate the submission
            if (ValidateSubmission(measure, errors) == false)
            {
                return BadRequest(new UploadResult(metadata.UploadUid, true, errors.ToArray()));
            }

            //save as json if valid
            using (var ms = new System.IO.MemoryStream())
            {
                using (var sw = new System.IO.StreamWriter(ms, System.Text.Encoding.UTF8, 1024, true))
                using (var jw = new Newtonsoft.Json.JsonTextWriter(sw))
                {
                    // note: serializerSettings is created but never passed to the serializer below
                    var serializerSettings = new Newtonsoft.Json.JsonSerializerSettings { Formatting = Newtonsoft.Json.Formatting.None, DateFormatString = "'yyyy-MM-dd'" };
                    var serializer = new Newtonsoft.Json.JsonSerializer();
                    serializer.DateFormatString = "yyyy'-'MM'-'dd";
                    serializer.Formatting = Newtonsoft.Json.Formatting.None;
                    serializer.Serialize(jw, measure);
                    await jw.FlushAsync();
                }

                ms.Seek(0, System.IO.SeekOrigin.Begin); //ms.Position = 0;
                await _fileService.WriteToStreamAsync(metadata.UploadUid, 0, ms);
            }
        }
        else
        {
            //assume json file
            using (var stream = _fileService.ReturnTempFileStream(metadata.UploadUid))
            using (var sr = new System.IO.StreamReader(stream))
            using (var jr = new Newtonsoft.Json.JsonTextReader(sr))
            {
                var serializer = new Newtonsoft.Json.JsonSerializer();
                serializer.DateFormatString = "yyyy'-'MM'-'dd";
                measure = serializer.Deserialize<Models.MeasureSubmissionViewModel>(jr);
            }

            //validate the submission
            if (ValidateSubmission(measure, errors) == false)
            {
                //upload is invalid, delete the temp file
                await _fileService.DeleteTempFileChunkAsync(metadata.UploadUid);
                return BadRequest(new UploadResult(metadata.UploadUid, true, errors.ToArray()));
            }
        }

        metricName = await _modelDB.Metrics.Where(m => m.ID == measure.MetricID.Value).Select(m => m.Title).FirstOrDefaultAsync();
        metricID = measure.MetricID;
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "Error validating pending measure upload.");
        errors.Add(ex.Message);
    }

    if (errors.Count > 0)
    {
        return BadRequest(new UploadResult(metadata.UploadUid, true, errors.ToArray()) { metricID = metricID, metricName = metricName });
    }

    return Ok(new UploadResult(metadata.UploadUid, true, metricID, metricName));
}
private void RefreshChunkMetaData(ChunkMetaData metaData, ChunkColumnInfo[] columnsInfo)
{
    metaData.setChunkMaxHeightBuilt(columnsInfo);
    //A generated chunk is always considered wild!
    metaData.IsWild = true;
}
uint GetPredictionAckMessageSize(uint chunksCount)
{
    return sizeof(uint) + sizeof(uint) /*+ sizeof(uint)*/ + chunksCount * ChunkMetaData.GetSize();
}
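ChunkMetaData.GetSize is not shown in these snippets. For the size computation above to agree with DecodePredictionMessage and DecodePredictionAckMessage, which read one hint byte followed by an 8-byte chunk id per entry, it would have to be something like this sketch:

public static uint GetSize()
{
    // One hint byte plus one 8-byte chunk identifier per serialized ChunkMetaData.
    return sizeof(byte) + sizeof(long);
}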
public IEnumerable<Attachment> SaveToTemp(IEnumerable files, ChunkMetaData chunkMetaData = null)
{
    if (!(files is HttpFileCollectionBase httpFiles))
    {
        return null;
    }

    var attachments = new List<Attachment>();

    StorageOperation(
        () =>
        {
            var directoryPath = Path.Combine(
                HttpContext.Current.Server.MapPath(ConfigurationReader.AttachmentsVirtualPath),
                ConfigurationReader.AttachmentsTempDir);

            for (var i = 0; i < httpFiles.Count; i++)
            {
                var file = httpFiles[i];
                var fileName = Path.GetFileName(chunkMetaData?.FileName ?? file.FileName);
                var filePath = chunkMetaData?.UploadUid.IsNotNullOrEmpty() == true
                    ? HttpContext.Current.Session[chunkMetaData.UploadUid] as string
                    : string.Empty;

                if (filePath.IsNullOrEmpty())
                {
                    filePath = Path.Combine(directoryPath, fileName);
                    filePath = GetUniqueFullFilePath(filePath);

                    if (chunkMetaData?.UploadUid.IsNotNullOrEmpty() == true)
                    {
                        HttpContext.Current.Session[chunkMetaData.UploadUid] = filePath;
                    }
                }

                if (chunkMetaData == null)
                {
                    file.SaveAs(filePath);
                }
                else
                {
                    AppendToFile(filePath, file.InputStream);
                }

                var newFileName = Path.GetFileName(filePath);
                var attachment = new Attachment
                {
                    Url = urlHelper.Content(
                        $"{ConfigurationReader.AttachmentsVirtualPath}/{ConfigurationReader.AttachmentsTempDir}/{newFileName}"
                            .Trim()),
                    Name = newFileName,
                    Size = file.ContentLength
                };
                attachments.Add(attachment);
            }
        });

    return attachments;
}
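GetUniqueFullFilePath is also undefined in these snippets. A common implementation, sketched here as an assumption, suffixes a counter until the candidate name is free (uses System.IO):

private static string GetUniqueFullFilePath(string filePath)
{
    var directory = Path.GetDirectoryName(filePath);
    var fileName = Path.GetFileNameWithoutExtension(filePath);
    var extension = Path.GetExtension(filePath);
    var counter = 1;

    // Probe "name.ext", "name(1).ext", "name(2).ext", ... until an unused name is found.
    while (File.Exists(filePath))
    {
        filePath = Path.Combine(directory, $"{fileName}({counter++}){extension}");
    }

    return filePath;
}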