public void OD_MBO_BuiltIn_TakeLockOver()
{
    ODataTest(() =>
    {
        // ARRANGE: a user with just enough rights to create and check out a file.
        var ctdId = ContentType.GetByName("SystemFolder").Id;
        var user = CreateUser("*****@*****.**");
        SecurityHandler.CreateAclEditor()
            .Allow(2, user.Id, false, PermissionType.PermissionTypes)
            .Allow(ctdId, user.Id, false, PermissionType.See)
            .Apply();

        File file;
        using (new CurrentUserBlock(user))
        {
            file = new File(CreateTestRoot("TestFiles")) { Name = "File-1" };
            file.Save();
            file.CheckOut();
        }
        Assert.AreEqual(user.Id, file.LockedById);

        // ACT: call the TakeLockOver OData action on the checked-out file.
        var url = ODataTools.GetODataUrl(Content.Create(file));
        var resp = ODataPostAsync($"{url}/TakeLockOver", "", "models=[{'user':'******'}]")
            .ConfigureAwait(false).GetAwaiter().GetResult();

        // ASSERT: success response and the lock now belongs to Administrator.
        Assert.AreEqual(200, resp.StatusCode);
        Assert.AreEqual("Ok", resp.Result);
        file = Node.Load<File>(file.Id);
        Assert.AreEqual(Identifiers.AdministratorUserId, file.LockedById);
    });
}
// Exports the current form's data as a CSV file content under the form,
// then signals a "cancel" user action to close the view.
void btnExport_Click(object sender, EventArgs e)
{
    if (CurrentForm == null)
        return;

    // Unique, sortable file name based on the form name and a UTC timestamp.
    string fileName = string.Concat("_", CurrentForm.Name,
        DateTime.UtcNow.ToString("yyyy_MM_dd___HH_mm_ss"), ".csv");

    var csv = new SNC.File(CurrentForm);
    csv.Name = fileName;
    csv.Binary = new BinaryData();
    csv.Binary.FileName = fileName;
    csv.Binary.ContentType = "application/vnd.ms-excel";

    string text = GetCSV(CurrentForm);

    // NOTE: the writer is deliberately not disposed here, because closing it
    // would also close the MemoryStream before SetStream/Save could read it.
    MemoryStream stream = new MemoryStream();
    StreamWriter writer = new StreamWriter(stream, Encoding.GetEncoding("windows-1250"));
    writer.Write(text);
    writer.Flush();

    // FIX: rewind before handing the stream over; after Write the position is
    // at the end (the SetAttachment helper in this codebase seeks too).
    stream.Seek(0, SeekOrigin.Begin);
    csv.Binary.SetStream(stream);
    csv.Save();

    // FIX: guard the cast instead of dereferencing a possibly-null 'as' result.
    var view = this.Parent as SingleContentView;
    if (view != null)
        view.OnUserAction(sender, "cancel", "Click");
}
public void OD_MBO_BuiltIn_TakeOwnership()
{
    ODataTest(() =>
    {
        // ARRANGE: a file created by (and therefore owned by) Administrator.
        File file;
        using (new CurrentUserBlock(User.Administrator))
        {
            file = new File(CreateTestRoot("TestFiles")) { Name = Guid.NewGuid().ToString() };
            file.Save();
            Assert.AreEqual(Identifiers.AdministratorUserId, file.OwnerId);
        }
        var user = CreateUser("*****@*****.**");

        // ACT: call the TakeOwnership OData action with the new owner's path.
        var url = ODataTools.GetODataUrl(Content.Create(file));
        var resp = ODataPostAsync($"{url}/TakeOwnership", "",
                $"models=[{{'userOrGroup':'{user.Path}'}}]")
            .ConfigureAwait(false).GetAwaiter().GetResult();

        // ASSERT: 204 No Content and ownership transferred to the user.
        Assert.AreEqual(204, resp.StatusCode);
        file = Node.Load<File>(file.Id);
        Assert.AreEqual(user.Id, file.OwnerId);
    });
}
// Fills the given binary with the (optionally gzip-compressed) page content,
// then writes it into the cache file, creating the file if it does not exist.
private void CreateOrModifyCacheFile(BinaryData cacheBinary, bool compress)
{
    if (compress)
    {
        byte[] compressedData;
        using (MemoryStream cacheStream = new MemoryStream())
        {
            // FIX: dispose the gzip stream deterministically; the original
            // leaked it if an exception occurred before Close().
            // Disposing it also finishes the gzip footer and closes the
            // underlying MemoryStream, but ToArray() still works after that.
            using (GZipOutputStream gzipStream = new GZipOutputStream(cacheStream))
            {
                // NOTE(review): ASCII encoding drops non-ASCII characters —
                // presumably the cached content is ASCII-only; confirm.
                byte[] buff = Encoding.ASCII.GetBytes(this._content.ToCharArray());
                gzipStream.Write(buff, 0, buff.Length);
                gzipStream.Flush();
            }
            compressedData = cacheStream.ToArray();
        }
        // set compressed binary
        cacheBinary.SetStream(new MemoryStream(compressedData));
    }
    else
    {
        // FIX: no scratch MemoryStream is allocated on this path anymore.
        cacheBinary.SetStream(Tools.GetStreamFromString(_content));
    }

    // Gets the cache file or creates a new one; the new stream is saved in both cases.
    SN.File f;
    if (!Node.Exists(FullCacheFilePath))
    {
        f = SN.File.CreateByBinary(this.CacheFolder, cacheBinary);
        f.Name = _cacheFile;
    }
    else
    {
        f = Node.Load<SN.File>(this.FullCacheFilePath);
        f.Binary = cacheBinary;
    }
    f.Save();
}
// Verifies chunked binary upload against the in-memory blob storage:
// start a chunk token, write four chunks, commit, then read the binary back.
public void InMemDb_Core_ChunkUpload_NewFile()
{
    Test(async () =>
    {
        var blobStorage = Providers.Instance.BlobStorage;

        // ARRANGE: an empty file and 50 bytes of payload in 16-byte chunks.
        var root = CreateTestRoot();
        var file = new File(root) { Name = "File1.txt" };
        file.Binary.ContentType = "application/octet-stream";
        file.Save();

        var chunks = new[]
        {
            new byte[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            new byte[] { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 },
            new byte[] { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 },
            new byte[] { 4, 4 }
        };
        var chunkSize = chunks[0].Length;

        // START CHUNK
        var versionId = file.VersionId;
        var propertyTypeId = PropertyType.GetByName("Binary").Id;
        var fullSize = 50L;
        var token = await blobStorage.StartChunkAsync(versionId, propertyTypeId, fullSize,
            CancellationToken.None).ConfigureAwait(false);

        // WRITE CHUNKS
        for (int i = 0; i < chunks.Length; i++)
        {
            var offset = i * chunkSize;
            var chunk = chunks[i];
            await blobStorage.WriteChunkAsync(versionId, token, chunk, offset, fullSize,
                CancellationToken.None).ConfigureAwait(false);
        }

        // COMMIT CHUNK
        await blobStorage.CommitChunkAsync(versionId, propertyTypeId, token, fullSize, null,
            CancellationToken.None).ConfigureAwait(false);

        // ASSERT: reload past the cache and compare the stored bytes.
        Cache.Reset();
        file = Node.Load<File>(file.Id);
        var length = Convert.ToInt32(file.Binary.Size);
        var buffer = new byte[length];
        // FIX: Stream.Read may return fewer bytes than requested; loop until
        // the whole binary is read instead of ignoring the return value.
        using (var stream = file.Binary.GetStream())
        {
            var total = 0;
            int read;
            while (total < length && (read = stream.Read(buffer, total, length - total)) > 0)
                total += read;
            Assert.AreEqual(length, total);
        }
        Assert.AreEqual(
            "11111111111111112222222222222222333333333333333344",
            new string(buffer.Select(b => (char)(b + '0')).ToArray()));
    }).GetAwaiter().GetResult();
}
/// <summary>
/// Creates a new file node of the specified content type under the given parent
/// and fills its Binary property from the provided stream. Nothing happens when
/// the content type is not allowed under the parent.
/// </summary>
/// <param name="contentTypeName">Content type name of the node to create.</param>
/// <param name="parent">Parent node that will contain the new file.</param>
/// <param name="fileName">Name of the new node and of its binary.</param>
/// <param name="stream">Stream holding the binary content.</param>
public static void CreateNodeOfType(string contentTypeName, Node parent, string fileName, Stream stream)
{
    var file = new SenseNet.ContentRepository.File(parent, contentTypeName);

    // Guard: silently skip creation when the parent does not allow this type.
    if (!CheckAllowedContentType(parent as GenericContent, file))
        return;

    file.Name = fileName;
    file.SetBinary("Binary", UploadHelper.CreateBinaryData(fileName, stream));
    file.Save();
}
/// <summary>
/// Serializes the given <see cref="DatabaseUsage"/> to JSON and persists it into the
/// cache file, creating the file first when none is supplied. The content is saved
/// only when it differs from the currently cached data. Caching is best-effort:
/// any persistence failure is logged and swallowed.
/// </summary>
/// <param name="databaseUsage">The usage data to cache.</param>
/// <param name="cached">The existing cache file, or null to create a new one.</param>
/// <param name="cancel">Token observed while creating the cache file.</param>
private async STT.Task PutToCacheAsync(DatabaseUsage databaseUsage, File cached, CancellationToken cancel)
{
    // Serialize the usage data to JSON in memory first.
    var resultBuilder = new StringBuilder();
    using (var writer = new StringWriter(resultBuilder))
        JsonSerializer.Create(SerializerSettings).Serialize(writer, databaseUsage);

    // Elevate: the cache file lives in a place regular users cannot write.
    using (new SystemAccount())
    {
        cached ??= await CreateCacheFileAsync(cancel);
        if (cached == null)
        {
            // Could not create the cache file; give up (cache is best-effort).
            return;
        }

        var iteration = 0;
        try
        {
            var serialized = resultBuilder.ToString();
            // A freshly created file has no meaningful binary yet, so skip reading it.
            var cachedStream = !cached.IsNew ? cached.Binary?.GetStream() : null;
            var cachedData = cachedStream != null ? RepositoryTools.GetStreamString(cachedStream) : string.Empty;

            // save the content only if there was a change
            if (string.Equals(serialized, cachedData))
            {
                return;
            }

            // The file may be modified concurrently; retry on version conflicts.
            Retrier.Retry(5, 500, typeof(NodeIsOutOfDateException), () =>
            {
                iteration++;
                // Reload to have a fresh instance (skipped on the first attempt
                // and for a just-created file, which cannot be out of date yet).
                if (!cached.IsNew && iteration > 1)
                {
                    cached = Node.Load<File>(cached.Id);
                }

                cached.SetCachedData(CacheKey, databaseUsage);
                cached.Binary.SetStream(RepositoryTools.GetStreamFromString(serialized));
                cached.Save(SavingMode.KeepVersion);

                _logger.LogTrace($"DatabaseUsage.cache has been saved. Iteration: {iteration}");
            });
        }
        catch (Exception e)
        {
            _logger.LogWarning(e, $"An error occurred during saving DatabaseUsage.cache in iteration {iteration}.");
            // do nothing
        }
    }
}
// Creates and saves a file with a random (GUID) name under the given parent,
// using the supplied string as its binary content.
private File CreateTestFile(Node parent, string fileContent)
{
    var testFile = new File(parent) { Name = Guid.NewGuid().ToString() };
    testFile.Binary.SetStream(RepositoryTools.GetStreamFromString(fileContent));
    testFile.Save();
    return testFile;
}
// Saves the file while the expected shared-lock value is exposed to the save
// pipeline via the node's cached data. The key is cleared in a finally block so
// the expectation cannot leak into later operations even when Save throws.
private static void SaveFile(File file, string lockValue)
{
    file.SetCachedData(WopiService.ExpectedSharedLock, lockValue);
    try
    {
        file.Save();
    }
    finally
    {
        file.ResetCachedData(WopiService.ExpectedSharedLock);
    }
}
// Copies the mail attachment's bytes into the file's Binary property and saves it.
public static void SetAttachment(SenseNet.ContentRepository.File file, FileAttachment fileAttachment)
{
    using (var attachmentStream = new MemoryStream())
    {
        // Load the attachment into memory, then rewind so the repository
        // reads it from the beginning.
        fileAttachment.Load(attachmentStream);
        attachmentStream.Seek(0, SeekOrigin.Begin);

        var binaryData = new BinaryData();
        binaryData.SetStream(attachmentStream);
        file.Binary = binaryData;
        file.Save();
    }
}
// Returns the shared test file, creating it on first access and caching its id
// so later calls just reload the same node.
public File LoadTestFile()
{
    if (_testFileId == 0)
    {
        var parentFolder = CreateFolder();
        var testFile = new File(parentFolder) { Name = Guid.NewGuid().ToString() };
        testFile.Binary.SetStream(RepositoryTools.GetStreamFromString(OriginalFileContent));
        testFile.Save();
        _testFileId = testFile.Id;
    }
    return Node.Load<File>(_testFileId);
}
// Ensures the /Root/TestFiles system folder exists, then creates a file in it
// (named as requested, or with a random GUID) containing placeholder text.
public FileOperation(string fileName = null)
{
    var container = Node.Load<SystemFolder>("/Root/TestFiles");
    if (container == null)
    {
        container = new SystemFolder(Repository.Root) { Name = "TestFiles" };
        container.Save();
    }

    TheFile = new File(container) { Name = fileName ?? Guid.NewGuid().ToString() };
    TheFile.Binary.SetStream(RepositoryTools.GetStreamFromString("Lorem ipsum..."));
    TheFile.Save();
}
// Exports the current form's data as a CSV file content under the form,
// then signals a "cancel" user action to close the view.
void btnExport_Click(object sender, EventArgs e)
{
    if (CurrentForm == null)
        return;

    // FIX: use UTC for the timestamp (machine-generated name, and the other
    // copy of this handler in the codebase already uses UtcNow).
    string fileName = string.Concat("_", CurrentForm.Name,
        DateTime.UtcNow.ToString("yyyy_MM_dd___HH_mm_ss"), ".csv");

    var csv = new SNC.File(CurrentForm);
    csv.Name = fileName;
    csv.Binary = new BinaryData();
    csv.Binary.FileName = fileName;
    csv.Binary.ContentType = "application/vnd.ms-excel";

    string text = GetCSV(CurrentForm);

    // NOTE: the writer is deliberately not disposed here, because closing it
    // would also close the MemoryStream before SetStream/Save could read it.
    MemoryStream stream = new MemoryStream();
    StreamWriter writer = new StreamWriter(stream, Encoding.GetEncoding("windows-1250"));
    writer.Write(text);
    writer.Flush();

    // FIX: rewind before handing the stream over; after Write the position is
    // at the end (the SetAttachment helper in this codebase seeks too).
    stream.Seek(0, SeekOrigin.Begin);
    csv.Binary.SetStream(stream);
    csv.Save();

    // FIX: guard the cast instead of dereferencing a possibly-null 'as' result.
    var view = this.Parent as SingleContentView;
    if (view != null)
        view.OnUserAction(sender, "cancel", "Click");
}
/// <summary>
/// Syncs every file found in <paramref name="filePath"/> into the given content as
/// File children, deleting each disk file after a successful sync. Returns one
/// result entry per processed file (success or failure).
/// </summary>
/// <param name="cntToSync">Content whose children receive the synced files.</param>
/// <param name="filePath">File system directory to read the files from.</param>
private List<SyncResultObject> GetFilesAndSync(Content cntToSync, string filePath)
{
    var result = new List<SyncResultObject>();
    DirectoryInfo dirInfo = new DirectoryInfo(filePath);
    IEnumerable<Content> children = cntToSync.Children.Where(c => c.TypeIs("File"));
    //Content lastContent = children.OrderByDescending(c => c.CreationDate).FirstOrDefault();
    //DateTime lastContentDate = (lastContent != null) ? lastContent.CreationDate : DateTime.MinValue;
    //technical debt: I think creationdate won't be good here, we should probably use last syncdate
    var fileInfos = dirInfo.GetFiles();
    //if (fileInfos.Length == 0)
    //    result.Add(new SyncResultObject(filePath, SyncResult.NoSyncToDo));
    //else
    if (fileInfos.Length > 0)
    {
        foreach (var file in fileInfos)
        {
            // Only a fully synced file may be deleted from disk afterwards.
            var fileSynced = false;
            string fileName = file.Name;
            try
            {
                using (Stream fileStream = file.Open(FileMode.Open, FileAccess.Read)) // open the file in read-only mode
                {
                    fileName = ContentNamingHelper.GetNameFromDisplayName(file.Name);
                    using (new SystemAccount())
                    { // Technical debt: for now we do not check whether the file needs to be updated or not.
                        Content fileContent = cntToSync.Children.Where(c => c.Name == fileName).FirstOrDefault();
                        if (fileContent == null)
                        {
                            // Create a new File child for a file not seen before.
                            SenseNet.ContentRepository.File newFile = new SenseNet.ContentRepository.File(cntToSync.ContentHandler);
                            newFile.Name = ContentNamingHelper.GetNameFromDisplayName(file.Name);
                            newFile.DisplayName = file.Name;
                            newFile.Save();
                            fileContent = Content.Load(newFile.Id);
                            var fileSyncAspect = Aspect.LoadAspectByPathOrName(ASPECTNAME);
                            fileContent.Save(); // why is this here? does it have to be saved up front? (original Hungarian note, translated)
                            fileContent.AddAspects(fileSyncAspect);
                        }
                        SaveAsTechnicalUser(fileContent, fileStream);
                        result.Add(new SyncResultObject(fileContent.Path, SyncResult.SyncSuccess));
                    }
                    fileSynced = true;
                }
            }
            catch (Exception ex)
            {
                SnLog.WriteException(ex);
                result.Add(new SyncResultObject(string.Concat(cntToSync.Path, "/", fileName), SyncResult.SyncFailed));
            }
            // NOTE(review): adding the result entry here (once, after the try) would be cleaner.

            // Delete the source file after a successful sync; failures are only logged.
            try
            {
                if (fileSynced)
                {
                    //Logger.WriteInformation(40002, "FILESYNC delete 545 " + file.Name);
                    file.Delete();
                }
                // should we log deletion?
            }
            catch (Exception ex)
            {
                SnLog.WriteException(ex);
            }
        }
    }
    //// save refresh date on parent
    //SaveAsTechnicalUser(cntToSync, null, true);
    return (result);
}