// Creates a reader for an XPS document; the document is not loaded until requested.
public XpsDocumentReader(FileMetadata metaData, IFilesProvider filesProvider)
{
    this.filesProvider = filesProvider;
    this.metadata = metaData;
    this.removeCommand = new RelayCommand(this.Remove);
    this.loaded = false;
}
/// <summary>
/// Overlays per-file metadata on top of the global metadata for the given file.
/// For each property, the last glob pattern that matches the file's relative path wins.
/// </summary>
/// <param name="file">Absolute or base-dir-relative path of the file being loaded.</param>
/// <param name="metadata">The global metadata to start from.</param>
/// <param name="fileMetadata">Per-file metadata rules (glob pattern → key/value), may be null.</param>
/// <returns>The merged metadata; the original dictionary when there are no file rules.</returns>
private static ImmutableDictionary<string, object> ApplyFileMetadata(
    string file,
    ImmutableDictionary<string, object> metadata,
    FileMetadata fileMetadata)
{
    if (fileMetadata == null || fileMetadata.Count == 0)
    {
        return metadata;
    }

    var merged = new Dictionary<string, object>(metadata);
    var baseDir = string.IsNullOrEmpty(fileMetadata.BaseDir)
        ? Directory.GetCurrentDirectory()
        : fileMetadata.BaseDir;
    var relativePath = PathUtility.MakeRelativePath(baseDir, file);

    foreach (var item in fileMetadata)
    {
        // Later patterns override earlier ones, so scan from the end and stop at the first match.
        for (var i = item.Value.Length - 1; i >= 0; i--)
        {
            var candidate = item.Value[i];
            if (!candidate.Glob.Match(relativePath))
            {
                continue;
            }

            // File-scoped metadata overrides the global value for this key.
            merged[candidate.Key] = candidate.Value;
            Logger.LogVerbose($"{relativePath} matches file metadata with glob pattern {candidate.Glob.Raw} for property {candidate.Key}");
            break;
        }
    }

    return merged.ToImmutableDictionary();
}
/// <summary>
/// Loads <paramref name="file"/> via <paramref name="processor"/>, first consulting the
/// incremental-build change dictionary so deleted files and files whose build can be
/// skipped return null. Otherwise defers to the base loader.
/// </summary>
public override FileModel Load(IDocumentProcessor processor, ImmutableDictionary<string, object> metadata, FileMetadata fileMetadata, FileAndType file)
{
    using (new LoggerFileScope(file.File))
    {
        if (CanProcessorIncremental(processor))
        {
            ChangeKindWithDependency ck;
            // Entries in ChangeDict are keyed by working-folder-relative path.
            string fileKey = ((RelativePath)file.File).GetPathFromWorkingFolder().ToString();
            if (IncrementalContext.ChangeDict.TryGetValue(fileKey, out ck))
            {
                Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}, ChangeType {ck}.");
                if (ck == ChangeKindWithDependency.Deleted)
                {
                    // Deleted files produce no model.
                    return null;
                }
                if (ck == ChangeKindWithDependency.None)
                {
                    Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}: Check incremental...");
                    // Skip only when every build step supports incremental build for this file.
                    if (processor.BuildSteps.Cast<ISupportIncrementalBuildStep>().All(step => step.CanIncrementalBuild(file)))
                    {
                        Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}: Skip build by incremental.");
                        return null;
                    }
                    Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}: Incremental not available.");
                }
            }
        }
        return base.Load(processor, metadata, fileMetadata, file);
    }
}
// Returns true when the on-disk file still matches the cached metadata
// (same length, last-write time and creation time, compared in UTC).
private static bool DoesCacheMatch(FileInfo currentFile, FileMetadata storedMetadata)
{
    // Refresh so we compare against the current file-system state, not a stale snapshot.
    currentFile.Refresh();

    if (currentFile.Length != storedMetadata.Length)
    {
        return false;
    }
    if (currentFile.LastWriteTimeUtc != storedMetadata.LastWriteTime)
    {
        return false;
    }
    return currentFile.CreationTimeUtc == storedMetadata.CreationTime;
}
/// <summary>
/// Creates a reader for the given file. Only XPS documents are currently supported;
/// returns null for any other extension.
/// </summary>
private IDocumentReader OpenFile(FileMetadata fileMetaData)
{
    // Bug fix: extensions are conventionally case-insensitive (".XPS" == ".xps"),
    // so compare ordinally ignoring case instead of with '=='.
    if (string.Equals(fileMetaData.Extension, ".xps", StringComparison.OrdinalIgnoreCase))
    {
        return new XpsDocumentReader(fileMetaData, this.filesProvider);
    }

    return null;
}
/// <summary>
/// Registers <paramref name="filename"/> with its metadata, or updates the metadata
/// when the file is already registered.
/// </summary>
public void AddOrUpdate(string filename, FileMetadata fileMetadata)
{
    if (!this.Contains(filename))
    {
        // Build the entry fully before publishing it, so readers never observe a
        // half-initialized RegisterInfo (the original assigned field-by-field through
        // repeated dictionary lookups after inserting an empty entry).
        var info = new RegisterInfo();
        info.index = index;
        info.fileData = new FileData();
        info.fileMetadata = fileMetadata;

        filenames[index] = filename;
        infos[filename] = info;

        // NOTE(review): the plain reads of 'index' above are not synchronized with these
        // Interlocked increments; concurrent Adds can still race — confirm callers serialize.
        Interlocked.Increment(ref index);
        Interlocked.Increment(ref count);
    }
    else
    {
        infos[filename].fileMetadata = fileMetadata;
    }
}
/// <summary>
/// Loads a single file through the given processor after merging per-file metadata
/// into the global metadata. Logs and rethrows any load failure.
/// </summary>
public virtual FileModel Load(IDocumentProcessor processor, ImmutableDictionary<string, object> metadata, FileMetadata fileMetadata, FileAndType file)
{
    using (new LoggerFileScope(file.File))
    {
        Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}: Loading...");

        var fullPath = Path.Combine(file.BaseDir, file.File);
        var mergedMetadata = ApplyFileMetadata(fullPath, metadata, fileMetadata);
        try
        {
            return processor.Load(file, mergedMetadata);
        }
        catch (Exception)
        {
            Logger.LogError($"Unable to load file: {file.File} via processor: {processor.Name}.");
            throw;
        }
    }
}
/// <summary>
/// Copies the file at <paramref name="path"/> into the managed documents directory,
/// raises the file-added event and records the new entry in the metadata store.
/// </summary>
/// <returns>The metadata created for the copied file.</returns>
public FileMetadata AddFile(string path)
{
    string hash = this.CalculateHash(path);
    string name = Path.GetFileNameWithoutExtension(path);
    string ext = Path.GetExtension(path);
    FileMetadata fmd = new FileMetadata(hash, ModelState.Added, name, ext, this.documentsDir);
    // Copy the physical file first; if this throws, no metadata entry is created.
    File.Copy(path, fmd.LocalPath);
    // NOTE(review): the event fires before the metadata list is updated — subscribers
    // that query FilesMetadata will not yet see the new entry; confirm this is intended.
    this.OnFileAdded(fmd);
    lock (this.metadata.FilesMetadata)
    {
        this.metadata.FilesMetadata.Add(fmd);
        this.SaveMetadata();
    }
    return fmd;
}
/// <summary>
/// Downloads the file identified by <paramref name="fileMetadata"/> to its local path.
/// Returns false when not authorized, when the server responds with a failure status,
/// or when writing the local file fails.
/// </summary>
public bool DownloadFile(FileMetadata fileMetadata)
{
    if (!this.IsAuthorized)
    {
        return false;
    }

    string apiPath = this.filesPath + fileMetadata.Id;
    try
    {
        // This type exposes a synchronous API, so we block on the async client here.
        using (HttpResponseMessage response = this.AuthorizedClient.GetAsync(apiPath).Result)
        {
            // Bug fix: a non-success response used to be reported as a successful download.
            if (!response.IsSuccessStatusCode)
            {
                return false;
            }

            // Bug fix: the returned task was previously discarded, so write failures were
            // silently ignored and the method could return before the file was written.
            response.Content.ReadAsFileStreamAsync(fileMetadata.LocalPath).Wait();
        }
    }
    catch
    {
        // Best-effort contract preserved: any failure maps to 'false'.
        return false;
    }

    return true;
}
/// <summary>
/// Builds a <c>FileMetadata</c> from the supplied values. When <paramref name="id"/> is
/// null or empty, a new upper-case GUID is generated and propagated to every property.
/// New files always start un-deleted at revision 1.
/// </summary>
public static FileMetadata CreateFile(
    string id,
    string name,
    string catalog,
    DateTime createTime,
    string extension,
    string mediaId,
    DateTime modifiedTime,
    string modifier,
    string owner,
    long size,
    PropertyCollection propertys)
{
    var metadata = new FileMetadata
    {
        Id = id,
        ResourceName = name,
        CatalogUri = catalog,
        CreatedTime = createTime,
        Extension = extension,
        MediaId = mediaId,
        LastModifiedTime = modifiedTime,
        LastModifier = modifier,
        Owner = owner,
        ResourceSize = size,
        IsDel = false,
        Revision = 1,
        Propertys = propertys,
    };

    if (string.IsNullOrEmpty(metadata.Id))
    {
        // Generate an identifier and keep every child property consistent with it.
        metadata.Id = Guid.NewGuid().ToString().ToUpper();
        foreach (var ppt in metadata.Propertys)
        {
            ppt.FileId = metadata.Id;
        }
    }

    return metadata;
}
// Raises the FileRemoved event for the given file, if anyone is listening.
// The null-conditional invoke takes an atomic snapshot of the handler list,
// matching the original copy-then-check pattern.
private void OnFileRemoved(FileMetadata fileMetadata)
{
    this.FileRemoved?.Invoke(this, fileMetadata);
}
// Wraps the given file and prepares a decryptor for reading its JSON content.
public JSONFileReaderStrategy(FileMetadata file)
{
    this.jsonFileDecryptor = new JSONFileDecryptor();
    this.file = file;
}
/// <summary>
/// No-op download; always reports success.
/// </summary>
public bool DownloadFile(FileMetadata blobInfo)
{
    return true;
}
/// <summary>
/// Writes the changed values from <paramref name="fileMetadata"/> back to the database:
/// updates the track row and, where needed, resolves or creates the related artist,
/// genre and album rows. Optionally refreshes the stored album artwork.
/// </summary>
/// <param name="fileMetadata">Metadata read from the audio file; only fields whose IsValueChanged is set are applied.</param>
/// <param name="updateAlbumArtwork">When true, the album artwork is re-cached and saved.</param>
private async Task UpdateDatabaseMetadataAsync(FileMetadata fileMetadata, bool updateAlbumArtwork)
{
    Track track = await this.trackRepository.GetTrackAsync(fileMetadata.SafePath);
    if (track == null)
    {
        // Unknown file: nothing to update.
        return;
    }

    // Track: copy over only the fields that actually changed.
    if (fileMetadata.Title.IsValueChanged) { track.TrackTitle = fileMetadata.Title.Value; }
    if (fileMetadata.Year.IsValueChanged) { track.Year = fileMetadata.Year.Value.SafeConvertToLong(); }
    if (fileMetadata.TrackNumber.IsValueChanged) { track.TrackNumber = fileMetadata.TrackNumber.Value.SafeConvertToLong(); }
    if (fileMetadata.TrackCount.IsValueChanged) { track.TrackCount = fileMetadata.TrackCount.Value.SafeConvertToLong(); }
    if (fileMetadata.DiscNumber.IsValueChanged) { track.DiscNumber = fileMetadata.DiscNumber.Value.SafeConvertToLong(); }
    if (fileMetadata.DiscCount.IsValueChanged) { track.DiscCount = fileMetadata.DiscCount.Value.SafeConvertToLong(); }
    if (fileMetadata.Lyrics.IsValueChanged) { track.HasLyrics = string.IsNullOrWhiteSpace(fileMetadata.Lyrics.Value) ? 0 : 1; }

    // Artist: resolve (or create) the artist row and point the track at it.
    if (fileMetadata.Artists.IsValueChanged)
    {
        // Fall back to the "unknown artist" placeholder when no artist is present.
        string newArtistName = fileMetadata.Artists.Values != null && !string.IsNullOrEmpty(fileMetadata.Artists.Values.FirstOrDefault()) ? fileMetadata.Artists.Values.FirstOrDefault() : Defaults.UnknownArtistString;
        Artist artist = await this.artistRepository.GetArtistAsync(newArtistName);
        if (artist == null)
        {
            artist = await this.artistRepository.AddArtistAsync(new Artist { ArtistName = newArtistName });
        }
        if (artist != null)
        {
            track.ArtistID = artist.ArtistID;
        }
    }

    // Genre: same resolve-or-create pattern as the artist.
    if (fileMetadata.Genres.IsValueChanged)
    {
        string newGenreName = fileMetadata.Genres.Values != null && !string.IsNullOrEmpty(fileMetadata.Genres.Values.FirstOrDefault()) ? fileMetadata.Genres.Values.FirstOrDefault() : Defaults.UnknownGenreString;
        Genre genre = await this.genreRepository.GetGenreAsync(newGenreName);
        if (genre == null)
        {
            genre = await this.genreRepository.AddGenreAsync(new Genre { GenreName = newGenreName });
        }
        if (genre != null)
        {
            track.GenreID = genre.GenreID;
        }
    }

    // Album: keyed by (title, album artist); a year change alone also triggers an update.
    if (fileMetadata.Album.IsValueChanged || fileMetadata.AlbumArtists.IsValueChanged || fileMetadata.Year.IsValueChanged)
    {
        string newAlbumTitle = !string.IsNullOrWhiteSpace(fileMetadata.Album.Value) ? fileMetadata.Album.Value : Defaults.UnknownAlbumString;
        string newAlbumArtist = fileMetadata.AlbumArtists.Values != null && !string.IsNullOrEmpty(fileMetadata.AlbumArtists.Values.FirstOrDefault()) ? fileMetadata.AlbumArtists.Values.FirstOrDefault() : Defaults.UnknownAlbumArtistString;
        Album album = await this.albumRepository.GetAlbumAsync(newAlbumTitle, newAlbumArtist);
        if (album == null)
        {
            // New album: cache its artwork before inserting.
            album = new Album { AlbumTitle = newAlbumTitle, AlbumArtist = newAlbumArtist, DateLastSynced = DateTime.Now.Ticks };
            album.ArtworkID = await this.cacheService.CacheArtworkAsync(IndexerUtils.GetArtwork(album, track.Path));
            album = await this.albumRepository.AddAlbumAsync(album);
        }
        if (album != null)
        {
            track.AlbumID = album.AlbumID;
        }
        await Task.Run(() => MetadataUtils.UpdateAlbumYear(album, fileMetadata.Year.Value.SafeConvertToLong())); // Update Album year
        await this.albumRepository.UpdateAlbumAsync(album);
    }

    await this.trackRepository.UpdateTrackAsync(track); // Update Track in the database

    if (updateAlbumArtwork)
    {
        // Get album artist
        string albumArtist = fileMetadata.AlbumArtists.Values != null && !string.IsNullOrEmpty(fileMetadata.AlbumArtists.Values.FirstOrDefault()) ? fileMetadata.AlbumArtists.Values.FirstOrDefault() : string.Empty;

        // If no album artist is found, use the artist name. The album was probably saved using the artist name.
        if (string.IsNullOrEmpty(albumArtist))
        {
            albumArtist = fileMetadata.Artists.Values != null && !string.IsNullOrEmpty(fileMetadata.Artists.Values.FirstOrDefault()) ? fileMetadata.Artists.Values.FirstOrDefault() : Defaults.UnknownAlbumArtistString;
        }

        // Get the album title
        string albumTitle = !string.IsNullOrWhiteSpace(fileMetadata.Album.Value) ? fileMetadata.Album.Value : Defaults.UnknownAlbumString;

        // Cache the new artwork
        string artworkID = await this.cacheService.CacheArtworkAsync(fileMetadata.ArtworkData.Value);

        // Update the album artwork in the database
        await this.albumRepository.UpdateAlbumArtworkAsync(albumTitle, albumArtist, artworkID);
    }
}
/// <summary>
/// Loads one file through the given processor. When incremental build is enabled,
/// deleted files and files whose build can be skipped return null; otherwise per-file
/// metadata is merged and the processor loads the file.
/// </summary>
private static FileModel Load(
    IDocumentProcessor processor,
    ImmutableDictionary<string, object> metadata,
    FileMetadata fileMetadata,
    FileAndType file,
    bool canProcessorIncremental,
    DocumentBuildContext context)
{
    using (new LoggerFileScope(file.File))
    {
        Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}: Loading...");

        if (canProcessorIncremental)
        {
            var incrementalContext = context.IncrementalBuildContext;
            ChangeKindWithDependency ck;
            // Change entries are keyed by working-folder-relative path.
            string fileKey = ((TypeForwardedToRelativePath)file.File).GetPathFromWorkingFolder().ToString();
            if (incrementalContext.ChangeDict.TryGetValue(fileKey, out ck))
            {
                Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}, ChangeType {ck}.");
                if (ck == ChangeKindWithDependency.Deleted)
                {
                    // Deleted files produce no model.
                    return null;
                }
                if (ck == ChangeKindWithDependency.None)
                {
                    Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}: Check incremental...");
                    // Skip only when every build step supports incremental build for this file.
                    if (processor.BuildSteps.Cast<ISupportIncrementalBuildStep>().All(step => step.CanIncrementalBuild(file)))
                    {
                        Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}: Skip build by incremental.");
                        return null;
                    }
                    Logger.LogDiagnostic($"Processor {processor.Name}, File {file.FullPath}: Incremental not available.");
                }
            }
        }
        var path = Path.Combine(file.BaseDir, file.File);
        metadata = ApplyFileMetadata(path, metadata, fileMetadata);
        try
        {
            return processor.Load(file, metadata);
        }
        catch (Exception)
        {
            Logger.LogError($"Unable to load file: {file.File} via processor: {processor.Name}.");
            throw;
        }
    }
}
/// <summary>
/// Lists metadata for every stored blob, resolving each file's owner e-mail from the
/// first path segment (the owner's user id). Blob paths have the form "ownerId/fileName".
/// </summary>
private IList<FileMetadata> GetFileMetadataForAdminUser()
{
    var blobs = _files.GetBlobs();
    var containerPrefix = _files.Uri + "/";
    var result = new List<FileMetadata>();

    foreach (var blob in blobs)
    {
        var blobUri = blob.Uri.ToString();
        // Strip the container prefix to get "ownerId/fileName".
        var relativePath = blobUri.Substring(containerPrefix.Length);
        var ownerId = relativePath.Substring(0, relativePath.IndexOf('/'));
        var ownerEmail = GetUserEmailByUserId(ownerId);
        var name = relativePath.Substring(ownerId.Length + 1); // To avoid '/'
        var contentType = Shared.GetContentType(name);
        var sizeInKb = ConvertBytesToKilobytes(blob.Properties.Length);

        result.Add(new FileMetadata(name, contentType, blobUri, ownerEmail, sizeInKb));
    }

    return result;
}
/// <summary>
/// Persists pending changes to a file's metadata, such as a new owner (from anonymous
/// to the newly logged in or registered user). The entity is already tracked by the
/// context, so saving the context flushes its modifications.
/// </summary>
/// <param name="item">The tracked file metadata whose changes should be saved.</param>
public void Update(FileMetadata item)
{
    // Changes are tracked by the context; SaveChanges persists them.
    // The parameter is unused here — it documents intent and keeps the interface uniform.
    _context.SaveChanges();
}
/// <summary>
/// Enumerates a virtual directory: returns a "." entry for the directory itself,
/// followed by one entity per child with metadata resolved from each VFS node.
/// Throws <see cref="VfsNotFoundException"/> when the path does not name a directory.
/// </summary>
protected override async Task <FileSystemEntity[]> EnumDirectoryImplAsync(string directoryPath, EnumDirectoryFlags flags, CancellationToken cancel = default)
{
    using (VfsPathParserContext ctx = await ParsePathInternalAsync(directoryPath, cancel))
    {
        if (ctx.Exception != null)
        {
            // Path parsing failed; surface the stored error.
            throw ctx.Exception;
        }

        if (ctx.LastEntity is VfsDirectory thisDirObject)
        {
            var entities = await thisDirObject.EnumEntitiesAsync(flags, cancel);
            List <FileSystemEntity> ret = new List <FileSystemEntity>();

            // First entry is the directory itself, conventionally named ".".
            FileSystemEntity thisDir = new FileSystemEntity(
                fullPath: ctx.NormalizedPath,
                name: ".",
                attributes: thisDirObject.Attributes,
                creationTime: thisDirObject.CreationTime,
                lastWriteTime: thisDirObject.LastWriteTime,
                lastAccessTime: thisDirObject.LastAccessTime
                );
            ret.Add(thisDir);

            foreach (var entity in entities)
            {
                if (entity is VfsDirectory dirObject)
                {
                    // Missing metadata fields fall back to Directory attributes / zero timestamps.
                    FileMetadata meta = await dirObject.GetMetadataAsync(cancel);
                    FileSystemEntity dir = new FileSystemEntity(
                        fullPath: PathParser.Combine(ctx.NormalizedPath, entity.Name),
                        name: entity.Name,
                        attributes: meta.Attributes ?? FileAttributes.Directory,
                        creationTime: meta.CreationTime ?? Util.ZeroDateTimeOffsetValue,
                        lastWriteTime: meta.LastWriteTime ?? Util.ZeroDateTimeOffsetValue,
                        lastAccessTime: meta.LastAccessTime ?? Util.ZeroDateTimeOffsetValue
                        );
                    ret.Add(dir);
                }
                else if (entity is VfsFile fileObject)
                {
                    FileMetadata meta = await fileObject.GetMetadataAsync(cancel);
                    // NOTE(review): the fallback here is FileAttributes.Directory for a *file*
                    // entry — looks copy-pasted from the directory branch; confirm intended.
                    FileSystemEntity file = new FileSystemEntity(
                        fullPath: PathParser.Combine(ctx.NormalizedPath, entity.Name),
                        name: entity.Name,
                        size: meta.Size,
                        physicalSize: meta.PhysicalSize,
                        attributes: meta.Attributes ?? FileAttributes.Directory,
                        creationTime: meta.CreationTime ?? Util.ZeroDateTimeOffsetValue,
                        lastWriteTime: meta.LastWriteTime ?? Util.ZeroDateTimeOffsetValue,
                        lastAccessTime: meta.LastAccessTime ?? Util.ZeroDateTimeOffsetValue
                        );
                    ret.Add(file);
                }
            }

            return(ret.ToArray());
        }
        else
        {
            throw new VfsNotFoundException(directoryPath, "Directory not found.");
        }
    }
}
/// <summary>
/// Base implementation: writing metadata is not supported. Derived classes that
/// support it override this method. Throws synchronously, as the original did.
/// </summary>
public virtual Task SetMetadataAsync(FileMetadata metadata, CancellationToken cancel = default)
{
    throw new NotSupportedException();
}
/// <summary>
/// No-op upload; nothing is stored and no metadata is produced.
/// </summary>
public FileMetadata UploadFile(FileMetadata aFileMetadata, FileBlobdata aFileBlobdata)
{
    return null;
}
/// <summary>
/// Upload whole file.
/// Stores each posted file in blob storage, records its metadata, tries to build a
/// 120x120 thumbnail, and returns per-file status objects (URLs, icon class) as JSON.
/// </summary>
/// <param name="prefix"> The prefix of the fields to be placed in the HTML. </param>
/// <param name="location"> The location where the temporary file should be stored. </param>
/// <param name="tag"> Tag stored with each file's metadata entry. </param>
/// <returns> The <see cref="ActionResult"/> containing information about execution of the upload. </returns>
private ActionResult UploadWholeFile(string prefix, string location, string tag)
{
    var statuses = new List <FilesStatus>();
    for (int i = 0; i < this.Request.Files.Count; i++)
    {
        var file = this.Request.Files[i];
        Debug.Assert(file != null, "file != null");

        // 'location' has the form "<container>\<fileNamePrefix...>".
        var containerName = location.Split("\\".ToCharArray(), 2).FirstOrDefault();
        var sourceFileName = Path.GetFileName(file.FileName ?? "") ?? "";
        var normalFileName = StringHelper.NormalizeFileName(sourceFileName);
        var fileNamePrefix = location.Split("\\".ToCharArray(), 2).Skip(1).FirstOrDefault();
        // Smaller files (< ~10 MB) expire in 2 days, larger ones in 10.
        var fileExpirationDate = this.datetimeService.UtcNow + TimeSpan.FromDays(file.ContentLength < 10 * 1024000 ? 2 : 10);

        Debug.Assert(sourceFileName != null, "sourceFileName != null");
        var metadataProvider = new DbFileMetadataProvider(this.db, this.datetimeService, this.DbUser.PracticeId);

        // creating the metadata entry for the main file
        FileMetadata metadata = metadataProvider.CreateTemporary(
            containerName,
            sourceFileName,
            string.Format("{0}file-{1}-{2}", fileNamePrefix, "{id}", normalFileName),
            fileExpirationDate,
            this.DbUser.Id,
            tag,
            formatWithId: true);
        metadata.OwnerUserId = this.DbUser.Id;
        metadataProvider.SaveChanges();

        // saving the file to the storage
        this.storage.UploadFileToStorage(file.InputStream, containerName, metadata.BlobName);

        // returning information to the client
        var fileStatus = new FilesStatus(metadata.Id, sourceFileName, file.ContentLength, prefix);
        bool imageThumbOk = false;
        try
        {
            // Try to build a 120x120 thumbnail; failure falls back to a generic icon below.
            var fullStoragePath = string.Format("{0}\\{1}", containerName, metadata.BlobName);
            var thumbName = string.Format("{0}\\{1}file-{2}-thumb-{4}x{5}-{3}", containerName, fileNamePrefix, metadata.Id, normalFileName, 120, 120);
            var thumbResult = ImageHelper.TryGetOrCreateThumb(metadata.Id, 120, 120, fullStoragePath, thumbName, true, this.storage, metadataProvider);
            if (thumbResult.Status == CreateThumbStatus.Ok)
            {
                fileStatus.ThumbnailUrl = @"data:" + thumbResult.ContentType + ";base64," + Convert.ToBase64String(thumbResult.Data);
                fileStatus.IsInGallery = true;
                imageThumbOk = true;
            }
        }
        // ReSharper disable EmptyGeneralCatchClause
        catch
        // ReSharper restore EmptyGeneralCatchClause
        {
            // Thumbnail generation is best-effort; any failure is deliberately ignored.
        }
        if (!imageThumbOk)
        {
            // No thumbnail: pick an icon class based on the file kind.
            if (StringHelper.IsDocumentFileName(sourceFileName))
            {
                fileStatus.IconClass = "document-file-icon";
            }
            else
            {
                fileStatus.IconClass = "generic-file-icon";
            }
        }
        else
        {
            fileStatus.UrlLarge = this.Url.Action("Image", new { w = 1024, h = 768, location, metadata.Id });
        }
        fileStatus.UrlFull = this.Url.Action("File", new { location, metadata.Id });
        fileStatus.DeleteUrl = this.Url.Action("Index", new { location, metadata.Id });
        statuses.Add(fileStatus);
    }
    return(this.JsonIframeSafe(new { files = statuses }));
}
/// <summary>
/// Opens a shared, read-only stream over a managed file, or returns null when the
/// file is not part of this store's metadata.
/// </summary>
public Stream GetFileStream(FileMetadata fileMetaData)
{
    if (!this.metadata.FilesMetadata.Contains(fileMetaData))
    {
        return null;
    }

    // File.OpenRead is exactly FileMode.Open + FileAccess.Read + FileShare.Read.
    return File.OpenRead(fileMetaData.LocalPath);
}
/// <summary>
/// Add file metadata to the set, grouping entries by canonical filename.
/// A version map is created on first sight of a canonical name.
/// </summary>
public void Add(FileMetadata metadata)
{
    string canonical = metadata.filenameCanonical;
    FileMetadataByVersion byVersion;
    bool known = metadataByCanonicalFilename.TryGetValue(canonical, out byVersion);
    if (!known)
    {
        byVersion = new FileMetadataByVersion(canonical);
    }
    byVersion.Add(metadata);
    metadataByCanonicalFilename[canonical] = byVersion;
}
// Convenience overload: tests the point against the tile's origin coordinates.
public bool IsPointInTile(FileMetadata tileMetadata, GeoPoint point)
{
    var originLat = tileMetadata.OriginLatitude;
    var originLon = tileMetadata.OriginLongitude;
    return IsPointInTile(originLat, originLon, point);
}
// Starts the adapter with an empty, not-yet-loaded metadata placeholder.
public JsonMetadataAdapter()
{
    var notLoaded = new FileMetadata(null, ModelState.NotLoaded, null, null, null, null);
    this.fileMetadata = notLoaded;
}
/// <summary>
/// Samples the elevation at (lat, lon) from the DEM tile set. Points exactly on the
/// grid return the grid value; otherwise the four surrounding grid corners are read
/// (no-data corners replaced by the average of the valid ones) and interpolated.
/// When the result is NO_DATA_OUT, the previous elevation is reused; exceptions are
/// logged and the current (possibly zero) value returned.
/// </summary>
public float ParseGeoDataAtPoint(GeoTiffDictionary adjacentTiles, FileMetadata metadata, double lat, double lon, float lastElevation, IInterpolator interpolator)
{
    float heightValue = 0;
    try
    {
        IGeoTiff mainTiff = adjacentTiles[metadata];
        //const double epsilon = (Double.Epsilon * 100);
        float noData = metadata.NoDataValueFloat;

        // precise position on the grid (with commas)
        double ypos = (lat - metadata.StartLat) / metadata.pixelSizeY;
        double xpos = (lon - metadata.StartLon) / metadata.pixelSizeX;

        // If pure integers, then it's on the grid
        float xInterpolationAmount = (float)xpos % 1;
        float yInterpolationAmount = (float)ypos % 1;

        bool xOnGrid = xInterpolationAmount == 0;
        bool yOnGrid = yInterpolationAmount == 0;

        // If xOnGrid and yOnGrid, we are on a grid intersection, and that's all
        if (xOnGrid && yOnGrid)
        {
            int x = (int)Math.Round(xpos, 0);
            int y = (int)Math.Round(ypos, 0);
            // The point may fall into an adjacent tile; FindTile remaps (x, y) accordingly.
            var tile = FindTile(metadata, adjacentTiles, x, y, out x, out y);
            heightValue = mainTiff.ParseGeoDataAtPoint(tile, x, y);
        }
        else
        {
            int xCeiling = (int)Math.Ceiling(xpos);
            int xFloor = (int)Math.Floor(xpos);
            int yCeiling = (int)Math.Ceiling(ypos);
            int yFloor = (int)Math.Floor(ypos);
            // Get 4 grid nearest points (DEM grid corners)
            // If not yOnGrid and not xOnGrid we are on grid horizontal line
            // We need elevations for top, bottom, left and right grid points (along x axis and y axis)
            float northWest = GetElevationAtPoint(metadata, adjacentTiles, xFloor, yFloor, NO_DATA_OUT);
            float northEast = GetElevationAtPoint(metadata, adjacentTiles, xCeiling, yFloor, NO_DATA_OUT);
            float southWest = GetElevationAtPoint(metadata, adjacentTiles, xFloor, yCeiling, NO_DATA_OUT);
            float southEast = GetElevationAtPoint(metadata, adjacentTiles, xCeiling, yCeiling, NO_DATA_OUT);

            // Corners with no data are replaced by the average of the remaining valid corners.
            float avgHeight = GetAverageExceptForNoDataValue(noData, NO_DATA_OUT, southWest, southEast, northWest, northEast);

            if (northWest == noData)
            {
                northWest = avgHeight;
            }
            if (northEast == noData)
            {
                northEast = avgHeight;
            }
            if (southWest == noData)
            {
                southWest = avgHeight;
            }
            if (southEast == noData)
            {
                southEast = avgHeight;
            }

            heightValue = interpolator.Interpolate(southWest, southEast, northWest, northEast, xInterpolationAmount, yInterpolationAmount);
        }
        if (heightValue == NO_DATA_OUT)
        {
            // No usable data here: carry the previous sample forward.
            heightValue = lastElevation;
        }
    }
    catch (Exception e)
    {
        Trace.TraceError($"Error while getting elevation data : {e.Message}{Environment.NewLine}{e.ToString()}");
    }
    return(heightValue);
}
/// <summary>
/// Runs the full pipeline for one processor: load all files into a host service,
/// then prebuild, build, postbuild and save the resulting models.
/// </summary>
private void BuildCore(
    IDocumentProcessor processor,
    IEnumerable<FileAndType> files,
    ImmutableDictionary<string, object> metadata,
    FileMetadata fileMetadata,
    DocumentBuildContext context)
{
    Logger.LogInfo($"Plug-in {processor.Name}: Loading document...");
    var models = files.Select(f => Load(processor, metadata, fileMetadata, f));
    using (var hostService = new HostService(models))
    {
        hostService.SourceFiles = context.AllSourceFiles;
        foreach (var m in hostService.Models)
        {
            // Default the repo-relative path when the loader did not set one.
            if (m.LocalPathFromRepoRoot == null)
            {
                m.LocalPathFromRepoRoot = Path.Combine(m.BaseDir, m.File);
            }
        }
        Logger.LogInfo($"Plug-in {processor.Name}: Document loaded (count = {hostService.Models.Count}).");
        Logger.LogInfo($"Plug-in {processor.Name}: Preprocessing...");
        Prebuild(processor, hostService);
        Logger.LogInfo($"Plug-in {processor.Name}: Building...");
        BuildArticle(processor, hostService);
        Logger.LogInfo($"Plug-in {processor.Name}: Postprocessing...");
        Postbuild(processor, hostService);
        Logger.LogInfo($"Plug-in {processor.Name}: Saving...");
        Save(processor, hostService, context);
    }
}
// Refreshes the lyrics pane for the given track: reads lyrics from the file's
// metadata, optionally downloads them when the file has none and the download
// setting is enabled, then rebuilds the view model and restarts highlighting.
// NOTE(review): 'async void' means callers cannot await this or observe exceptions;
// acceptable only for fire-and-forget event-style invocation — confirm call sites.
private async void RefreshLyricsAsync(PlayableTrack track)
{
    if (track == null)
    {
        return;
    }

    this.StopHighlighting();

    FileMetadata fmd = await this.metadataService.GetFileMetadataAsync(track.Path);

    await Task.Run(() =>
    {
        // If we're in editing mode, delay changing the lyrics.
        if (this.LyricsViewModel != null && this.LyricsViewModel.IsEditing)
        {
            this.updateLyricsAfterEditingTimer.Start();
            return;
        }

        // No FileMetadata available: clear the lyrics.
        if (fmd == null)
        {
            this.ClearLyrics();
            return;
        }
    });

    try
    {
        Lyrics lyrics = null;
        bool mustDownloadLyrics = false;

        await Task.Run(() =>
        {
            lyrics = new Lyrics(fmd != null && fmd.Lyrics.Value != null ? fmd.Lyrics.Value : String.Empty, string.Empty);

            // If the file has no lyrics, and the user enabled automatic download of lyrics, indicate that we need to try to download.
            if (!lyrics.HasText)
            {
                if (SettingsClient.Get <bool>("Lyrics", "DownloadLyrics"))
                {
                    string artist = fmd.Artists != null && fmd.Artists.Values != null && fmd.Artists.Values.Length > 0 ? fmd.Artists.Values[0] : string.Empty;
                    string title = fmd.Title != null && fmd.Title.Value != null ? fmd.Title.Value : string.Empty;

                    // Both artist and title are required for an online lookup.
                    if (!string.IsNullOrWhiteSpace(artist) & !string.IsNullOrWhiteSpace(title))
                    {
                        mustDownloadLyrics = true;
                    }
                }
            }
        });

        // No lyrics were found in the file: try to download.
        if (mustDownloadLyrics)
        {
            this.IsDownloadingLyrics = true;

            try
            {
                var factory = new LyricsFactory();
                lyrics = await factory.GetLyricsAsync(fmd.Artists.Values[0], fmd.Title.Value);
            }
            catch (Exception ex)
            {
                LogClient.Error("Could not get lyrics online {0}. Exception: {1}", track.Path, ex.Message);
            }

            this.IsDownloadingLyrics = false;
        }

        await Task.Run(() =>
        {
            this.LyricsViewModel = new LyricsViewModel(container, track, metadataService);
            this.LyricsViewModel.SetLyrics(lyrics);
        });
    }
    catch (Exception ex)
    {
        // Any failure resets the download flag and falls back to an empty lyrics pane.
        this.IsDownloadingLyrics = false;
        LogClient.Error("Could not show lyrics for Track {0}. Exception: {1}", track.Path, ex.Message);
        this.ClearLyrics();
        return;
    }

    this.StartHighlighting();
}
// Builds the filename to run regex matching against. Folders get a dummy ".mkv"
// extension appended so extension-based expressions still have something to match.
private string GetRegexInput(FileMetadata file)
{
    var input = file.IsFolder ? file.Id + ".mkv" : file.Id;
    return Path.GetFileName(input);
}
// Captures the metadata describing the file that was uploaded successfully.
public DropboxUploadSuccessResult(FileMetadata metadata)
{
    this._metadata = metadata;
}
/// <summary>
/// Stores <paramref name="data"/> on the FTP server under a generated, type-based
/// directory path and returns the file handle, or null when the upload fails.
/// </summary>
/// <param name="data">Raw file contents; must be non-null and non-empty.</param>
/// <param name="filename">Original file name; only its extension influences the stored path.</param>
public string Create(byte[] data, string filename)
{
    if (data == null || data.Length == 0)
    {
        throw new ArgumentNullException("data");
    }
    var extension = Path.GetExtension(filename);
    var fileType = FileTypeUtil.DeduceFileTypeFromExtension(extension);
    var now = DateTime.Now;
    // Path layout: [file type]/[year]/[month+day]/[Encode[ms-of-day]][Encode[random]].[ext]
    // Example: IMAGE/2016/0314/Xb4J3ims7gC4Qyk4.jpg
    // Note: file names are case-sensitive, so a Linux file system must be used.
    // File handle: [file type][year][month+day][random string].[ext]
    // The file-type segment is the first five characters of the type name — mind this when naming types!
    // C# GUIDs are version 4, i.e. the mostly-random layout:
    // xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx
    // NOTE(review): Take(48) on a 16-byte GUID array is a no-op, and ToInt64 only reads
    // the first 8 bytes anyway — confirm the intended amount of randomness.
    var rnd = BitConverter.ToInt64(Guid.NewGuid().ToByteArray().Take(48).ToArray(), 0);
    var fileTypeName = fileType.ToString().Substring(0, 5).PadLeft(5, '0').ToUpper();
    var year = string.Format("{0:yyyy}", now);
    var monthAndDay = string.Format("{0:MMdd}", now);
    var fileName = string.Format("{0}{1}{2}", Encode(GetMillisecondsOfADay(now)), Encode(rnd), extension);
    var fileHandle = string.Format("{0}{1}{2}{3}", fileTypeName, year, monthAndDay, fileName);
    var file = new FileMetadata()
    {
        FileHandle = fileHandle,
        Size = data.Length,
        Name = filename,
        CreatedAt = now,
        FileType = (int)fileType,
        Path = Path.Combine(fileTypeName, year, monthAndDay),
        MimeType = MimeTypeUtil.GetMimeType(extension)
    };
    // save it
    try
    {
        // Create the [type]/[year]/[monthday] directory chain on the server as needed.
        var directory = Path.Combine(fileTypeName, year, monthAndDay);
        if (!DirectoryExists(directory))
        {
            if (!DirectoryExists(Path.Combine(fileTypeName, year)))
            {
                if (!DirectoryExists(fileTypeName))
                {
                    MakeDirectory(fileTypeName);
                }
                MakeDirectory(Path.Combine(fileTypeName, year));
            }
            MakeDirectory(directory);
        }
        var ftpRequest = CreateRequest(Path.Combine(directory, fileName));
        ftpRequest.Method = WebRequestMethods.Ftp.UploadFile;
        ftpRequest.ContentLength = data.Length;
        using (var requestStream = ftpRequest.GetRequestStream())
        {
            requestStream.Write(data, 0, data.Length);
        }
        // Drain the response to complete the FTP transfer.
        using (var response = ftpRequest.GetResponse())
        using (var reader = new StreamReader(response.GetResponseStream()))
        {
        }
        try
        {
            // Compute the MD5 of the payload; the metadata row itself is not yet persisted.
            var md5 = new MD5CryptoServiceProvider().ComputeHash(data);
            file.Md5 = BitConverter.ToString(md5).Replace("-", string.Empty);
            //todo insert file meta
        }
        catch (System.Exception error)
        {
            _logger.Error(string.Format("save filemetadata{0} error", fileHandle), error);
        }
    }
    catch (System.Exception error)
    {
        _logger.Error(string.Format("create error[{0}]", fileHandle), error);
        return null;
    }
    return fileHandle;
}
/// <summary>
/// Loads an archived file's data.
/// Handles three container layouts: compressed/uncompressed BSA payloads,
/// zlib-compressed general BA2 entries, and chunked BA2 textures (for which a
/// DDS header is prepared from the stored texture description).
/// </summary>
public byte[] LoadFileData(FileMetadata file)
{
    _r.BaseStream.Position = file.Offset;
    var fileSize = (int)file.Size;
    if (_hasNamePrefix)
    {
        // A length-prefixed name precedes the data; skip it and shrink the payload size.
        var len = _r.ReadByte();
        fileSize -= len + 1;
        _r.BaseStream.Position = file.Offset + 1 + len;
    }
    var newFileSize = fileSize;
    // The per-file compression flag can be inverted archive-wide by _compressToggle.
    var bsaCompressed = file.SizeFlags > 0 && file.Compressed ^ _compressToggle;
    if (Version == SSE_BSAHEADER_VERSION && bsaCompressed)
    {
        // SSE archives store the decompressed size in the first 4 bytes of the record.
        newFileSize = _r.ReadLEInt32() - 4;
    }
    var fileData = _r.ReadBytes(fileSize);
    // BSA
    if (bsaCompressed)
    {
        var newFileData = new byte[newFileSize];
        if (Version != SSE_BSAHEADER_VERSION)
        {
            // Pre-SSE: zlib stream following a 4-byte original-size field.
            if (fileData.Length > 4)
            {
                using (var s = new MemoryStream(fileData, 4, fileSize - 4))
                using (var gs = new InflaterInputStream(s))
                    gs.Read(newFileData, 0, newFileData.Length);
            }
            else
            {
                // Too short to hold a compressed payload; pass through unchanged.
                newFileData = fileData;
            }
        }
        else
        {
            // NOTE(review): SSE decompression here uses LzwInputStream — confirm this matches
            // the archive's actual compression; no SOURCE line establishes the codec.
            using (var s = new MemoryStream(fileData))
            using (var gs = new LzwInputStream(s))
                gs.Read(newFileData, 0, newFileData.Length);
        }
        fileData = newFileData;
    }
    // General BA2
    else if (file.PackedSize > 0 && file.Tex.Chunks == null)
    {
        // Packed general BA2 entry: inflate to the recorded unpacked size.
        var newFileData = new byte[file.UnpackedSize];
        using (var s = new MemoryStream(fileData))
        using (var gs = new InflaterInputStream(s))
            gs.Read(newFileData, 0, newFileData.Length);
        fileData = newFileData;
    }
    // Fill DDS Header
    else if (file.Tex.Chunks != null)
    {
        // Fill DDS Header
        var ddsHeader = new DDSHeader
        {
            dwFlags = DDSFlags.HEADER_FLAGS_TEXTURE | DDSFlags.HEADER_FLAGS_LINEARSIZE | DDSFlags.HEADER_FLAGS_MIPMAP,
            dwHeight = file.Tex.Height,
            dwWidth = file.Tex.Width,
            dwMipMapCount = file.Tex.NumMips,
            dwCaps = DDSCaps.SURFACE_FLAGS_TEXTURE | DDSCaps.SURFACE_FLAGS_MIPMAP,
            // Unk16 == 2049 marks a cubemap in the source data.
            dwCaps2 = file.Tex.Unk16 == 2049 ? DDSCaps2.CUBEMAP_ALLFACES : 0,
        };
        var dx10Header = new DDSHeader_DXT10();
        var dx10 = false;
        // map tex format
        switch (file.Tex.Format)
        {
            case DXGIFormat.BC1_UNORM:
                ddsHeader.ddspf.dwFlags = DDSPixelFormats.FourCC;
                ddsHeader.ddspf.dwFourCC = Encoding.ASCII.GetBytes("DXT1");
                ddsHeader.dwPitchOrLinearSize = (uint)file.Tex.Width * file.Tex.Height / 2U; // 4bpp
                break;
            case DXGIFormat.BC2_UNORM:
                ddsHeader.ddspf.dwFlags = DDSPixelFormats.FourCC;
                ddsHeader.ddspf.dwFourCC = Encoding.ASCII.GetBytes("DXT3");
                ddsHeader.dwPitchOrLinearSize = (uint)file.Tex.Width * file.Tex.Height; // 8bpp
                break;
            case DXGIFormat.BC3_UNORM:
                ddsHeader.ddspf.dwFlags = DDSPixelFormats.FourCC;
                ddsHeader.ddspf.dwFourCC = Encoding.ASCII.GetBytes("DXT5");
                ddsHeader.dwPitchOrLinearSize = (uint)file.Tex.Width * file.Tex.Height; // 8bpp
                break;
            case DXGIFormat.BC5_UNORM:
                ddsHeader.ddspf.dwFlags = DDSPixelFormats.FourCC;
                ddsHeader.ddspf.dwFourCC = Encoding.ASCII.GetBytes("ATI2");
                ddsHeader.dwPitchOrLinearSize = (uint)file.Tex.Width * file.Tex.Height; // 8bpp
                break;
            case DXGIFormat.BC7_UNORM:
                // BC7 needs the extended DX10 header in addition to the FourCC.
                ddsHeader.ddspf.dwFlags = DDSPixelFormats.FourCC;
                ddsHeader.ddspf.dwFourCC = Encoding.ASCII.GetBytes("DX10");
                ddsHeader.dwPitchOrLinearSize = (uint)file.Tex.Width * file.Tex.Height; // 8bpp
                dx10 = true;
                dx10Header.dxgiFormat = (int)DXGIFormat.BC7_UNORM;
                break;
            case DXGIFormat.DXGI_FORMAT_B8G8R8A8_UNORM:
                ddsHeader.ddspf.dwFlags = DDSPixelFormats.RGB | DDSPixelFormats.AlphaPixels;
                ddsHeader.ddspf.dwRGBBitCount = 32;
                ddsHeader.ddspf.dwRBitMask = 0x00FF0000;
                ddsHeader.ddspf.dwGBitMask = 0x0000FF00;
                ddsHeader.ddspf.dwBBitMask = 0x000000FF;
                ddsHeader.ddspf.dwABitMask = 0xFF000000;
                ddsHeader.dwPitchOrLinearSize = (uint)file.Tex.Width * file.Tex.Height * 4; // 32bpp
                break;
            case DXGIFormat.DXGI_FORMAT_R8_UNORM:
                ddsHeader.ddspf.dwFlags = DDSPixelFormats.RGB;
                ddsHeader.ddspf.dwRGBBitCount = 8;
                ddsHeader.ddspf.dwRBitMask = 0xFF;
                ddsHeader.dwPitchOrLinearSize = (uint)file.Tex.Width * file.Tex.Height; // 8bpp
                break;
            default:
                throw new InvalidOperationException("DDS FAILED");
        }
        //
        if (dx10)
        {
            dx10Header.resourceDimension = DDSDimension.Texture2D;
            dx10Header.miscFlag = 0;
            dx10Header.arraySize = 1;
            dx10Header.miscFlags2 = 0;
            // NOTE(review): Write(null) and the commented-out append below suggest the DX10
            // header is never actually emitted into the returned data — confirm.
            dx10Header.Write(null);
            //char dds2[sizeof(dx10Header)];
            //memcpy(dds2, &dx10Header, sizeof(dx10Header));
            //content.append(QByteArray::fromRawData(dds2, sizeof(dx10Header)));
        }
    }
    return(fileData);
}
/// <summary>
/// Create a cached copy of a file's metadata by filename. The metadata will be for the last retrieved or successfully saved version, and will not include any changes that have not
/// completed writing. The returned pointer must be released by the user when no longer needed.
/// </summary>
/// <param name="options">Object containing properties related to which user is requesting metadata, and for which filename</param>
/// <param name="outMetadata">A copy of the FileMetadata structure will be set if successful. This data must be released by calling <see cref="Release" />.</param>
/// <returns>
/// <see cref="Result.Success" /> if the metadata is currently cached, otherwise an error result explaining what went wrong
/// </returns>
public Result CopyFileMetadataByFilename(CopyFileMetadataByFilenameOptions options, out FileMetadata outMetadata)
{
    // Marshal the managed options into a native buffer for the EOS call.
    System.IntPtr optionsAddress = new System.IntPtr();
    Helper.TryMarshalSet <CopyFileMetadataByFilenameOptionsInternal, CopyFileMetadataByFilenameOptions>(ref optionsAddress, options);

    var outMetadataAddress = System.IntPtr.Zero;
    var funcResult = EOS_TitleStorage_CopyFileMetadataByFilename(InnerHandle, optionsAddress, ref outMetadataAddress);

    // The native options copy is no longer needed once the call returns.
    Helper.TryMarshalDispose(ref optionsAddress);

    // Copy the native metadata into the managed struct, then free the native buffer.
    if (Helper.TryMarshalGet <FileMetadataInternal, FileMetadata>(outMetadataAddress, out outMetadata))
    {
        EOS_TitleStorage_FileMetadata_Release(outMetadataAddress);
    }

    return(funcResult);
}
// Reads the archive header and builds the file-metadata table (_files), the hash lookup
// (_filesByHash) and the virtual directory tree (RootDir). Three archive families are
// recognised by magic number: Fallout 4 BA2 (F4_BSAHEADER_FILEID, "GNRL" or "DX10" layout),
// Oblivion/Fallout3/SSE BSA (OB_BSAHEADER_FILEID), and Morrowind BSA (MW_BSAHEADER_FILEID).
// The exact sequence of reads below IS the file format — do not reorder.
// Hex comments ("// 08", "// 0C", ...) are the field offsets within each on-disk record.
void ReadMetadata()
{
    // Open
    Magic = _r.ReadLEUInt32();
    if (Magic == F4_BSAHEADER_FILEID)
    {
        Version = _r.ReadLEUInt32();
        if (Version != F4_BSAHEADER_VERSION) { throw new InvalidOperationException("BAD MAGIC"); }
        // Read the header
        var header_Type = _r.ReadASCIIString(4); // 08 GNRL=General, DX10=Textures
        var header_NumFiles = _r.ReadLEUInt32(); // 0C
        var header_NameTableOffset = _r.ReadLEUInt64(); // 10 - relative to start of file
        // Create file metadatas — the name table lives at the end of the archive.
        _r.BaseStream.Position = (long)header_NameTableOffset;
        _files = new FileMetadata[header_NumFiles];
        for (var i = 0; i < header_NumFiles; i++)
        {
            var length = _r.ReadLEUInt16();
            var path = _r.ReadASCIIString(length);
            _files[i] = new FileMetadata
            {
                Path = path,
                PathHash = Tes4HashFilePath(path),
            };
        }
        if (header_Type == "GNRL") // General BA2 Format
        {
            _r.BaseStream.Position = 16 + 8; // sizeof(header) + 8
            for (var i = 0; i < header_NumFiles; i++)
            {
                var info_NameHash = _r.ReadLEUInt32(); // 00
                var info_Ext = _r.ReadASCIIString(4); // 04 - extension
                var info_DirHash = _r.ReadLEUInt32(); // 08
                var info_Unk0C = _r.ReadLEUInt32(); // 0C - flags? 00100100
                var info_Offset = _r.ReadLEUInt64(); // 10 - relative to start of file
                var info_PackedSize = _r.ReadLEUInt32(); // 18 - packed length (zlib)
                var info_UnpackedSize = _r.ReadLEUInt32(); // 1C - unpacked length
                var info_Unk20 = _r.ReadLEUInt32(); // 20 - BAADF00D
                _files[i].PackedSize = info_PackedSize;
                _files[i].UnpackedSize = info_UnpackedSize;
                _files[i].Offset = (long)info_Offset;
            }
        }
        else if (header_Type == "DX10") // Texture BA2 Format
        {
            _r.BaseStream.Position = 16 + 8; // sizeof(header) + 8
            for (var i = 0; i < header_NumFiles; i++)
            {
                var fileMetadata = _files[i];
                var info_NameHash = _r.ReadLEUInt32(); // 00
                var info_Ext = _r.ReadASCIIString(4); // 04
                var info_DirHash = _r.ReadLEUInt32(); // 08
                var info_Unk0C = _r.ReadByte(); // 0C
                var info_NumChunks = _r.ReadByte(); // 0D
                var info_ChunkHeaderSize = _r.ReadLEUInt16(); // 0E - size of one chunk header
                var info_Height = _r.ReadLEUInt16(); // 10
                var info_Width = _r.ReadLEUInt16(); // 12
                var info_NumMips = _r.ReadByte(); // 14
                var info_Format = _r.ReadByte(); // 15 - DXGI_FORMAT
                var info_Unk16 = _r.ReadLEUInt16(); // 16 - 0800
                // read tex-chunks
                var texChunks = new F4TexChunk[info_NumChunks];
                for (var j = 0; j < info_NumChunks; j++)
                {
                    texChunks[j] = new F4TexChunk
                    {
                        Offset = _r.ReadLEUInt64(), // 00
                        PackedSize = _r.ReadLEUInt32(), // 08
                        UnpackedSize = _r.ReadLEUInt32(), // 0C
                        StartMip = _r.ReadLEUInt16(), // 10
                        EndMip = _r.ReadLEUInt16(), // 12
                        Unk14 = _r.ReadLEUInt32(), // 14 - BAADFOOD
                    }
                    // NOTE(review): token sequence here is "} } ;" in the original — the
                    // assignment appears to be missing its ';' before the loop closes.
                    // Preserved as-is; verify against the original repository.
                }
                ;
                // NOTE(review): texChunks[0] will throw if info_NumChunks == 0 — confirm the
                // format guarantees at least one chunk.
                var firstChunk = texChunks[0];
                _files[i].PackedSize = firstChunk.PackedSize;
                _files[i].UnpackedSize = firstChunk.UnpackedSize;
                _files[i].Offset = (long)firstChunk.Offset;
                fileMetadata.Tex = new F4Tex
                {
                    Height = info_Height,
                    Width = info_Width,
                    NumMips = info_NumMips,
                    Format = (DXGIFormat)info_Format,
                    Unk16 = info_Unk16,
                    Chunks = texChunks,
                };
            }
        }
    }
    else if (Magic == OB_BSAHEADER_FILEID)
    {
        Version = _r.ReadLEUInt32();
        if (Version != OB_BSAHEADER_VERSION && Version != F3_BSAHEADER_VERSION && Version != SSE_BSAHEADER_VERSION) { throw new InvalidOperationException("BAD MAGIC"); }
        // Read the header
        var header_FolderRecordOffset = _r.ReadLEUInt32(); // Offset of beginning of folder records
        var header_ArchiveFlags = _r.ReadLEUInt32(); // Archive flags
        var header_FolderCount = _r.ReadLEUInt32(); // Total number of folder records (OBBSAFolderInfo)
        var header_FileCount = _r.ReadLEUInt32(); // Total number of file records (OBBSAFileInfo)
        var header_FolderNameLength = _r.ReadLEUInt32(); // Total length of folder names
        var header_FileNameLength = _r.ReadLEUInt32(); // Total length of file names
        var header_FileFlags = _r.ReadLEUInt32(); // File flags
        // Calculate some useful values — both path and file names must be embedded.
        if ((header_ArchiveFlags & OB_BSAARCHIVE_PATHNAMES) == 0 || (header_ArchiveFlags & OB_BSAARCHIVE_FILENAMES) == 0) { throw new InvalidOperationException("HEADER FLAGS"); }
        _compressToggle = (header_ArchiveFlags & OB_BSAARCHIVE_COMPRESSFILES) != 0;
        if (Version == F3_BSAHEADER_VERSION || Version == SSE_BSAHEADER_VERSION)
        {
            _hasNamePrefix = (header_ArchiveFlags & F3_BSAARCHIVE_PREFIXFULLFILENAMES) != 0;
        }
        // SSE folder records are 24 bytes (extra unk + 64-bit offset); older versions use 16.
        var folderSize = Version != SSE_BSAHEADER_VERSION ? 16 : 24;
        // Create file metadatas — jump ahead to the file-name strings, which follow the
        // folder records (+1 per folder for the name-length byte) and the file records.
        _files = new FileMetadata[header_FileCount];
        var filenamesSectionStartPos = _r.BaseStream.Position = header_FolderRecordOffset + header_FolderNameLength + header_FolderCount * (folderSize + 1) + header_FileCount * 16;
        var buf = new List<byte>(64);
        for (var i = 0; i < header_FileCount; i++)
        {
            buf.Clear();
            byte curCharAsByte;
            // Names are null-terminated ASCII.
            while ((curCharAsByte = _r.ReadByte()) != 0) { buf.Add(curCharAsByte); }
            var path = Encoding.ASCII.GetString(buf.ToArray());
            _files[i] = new FileMetadata
            {
                Path = path,
            };
        }
        // Sanity check: we should have consumed exactly header_FileNameLength bytes.
        if (_r.BaseStream.Position != filenamesSectionStartPos + header_FileNameLength) { throw new InvalidOperationException("HEADER FILENAMES"); }
        // read-all folders
        _r.BaseStream.Position = header_FolderRecordOffset;
        var foldersFiles = new uint[header_FolderCount];
        for (var i = 0; i < header_FolderCount; i++)
        {
            var folder_Hash = _r.ReadLEUInt64(); // Hash of the folder name
            var folder_FileCount = _r.ReadLEUInt32(); // Number of files in folder
            var folder_Unk = 0U; var folder_Offset = 0UL;
            if (Version == SSE_BSAHEADER_VERSION) { folder_Unk = _r.ReadLEUInt32(); folder_Offset = _r.ReadLEUInt64(); }
            else { folder_Offset = _r.ReadLEUInt32(); }
            foldersFiles[i] = folder_FileCount;
        }
        // add file — folder name + file records follow immediately; files are assigned to
        // folders in order via fileNameIndex.
        var fileNameIndex = 0U;
        for (var i = 0; i < header_FolderCount; i++)
        {
            var folder_name = _r.ReadASCIIString(_r.ReadByte(), ASCIIFormat.PossiblyNullTerminated); // BSAReadSizedString
            var folderFiles = foldersFiles[i];
            for (var j = 0; j < folderFiles; j++)
            {
                var file_Hash = _r.ReadLEUInt64(); // Hash of the filename
                var file_SizeFlags = _r.ReadLEUInt32(); // Size of the data, possibly with OB_BSAFILE_FLAG_COMPRESS set
                var file_Offset = _r.ReadLEUInt32(); // Offset to raw file data
                var fileMetadata = _files[fileNameIndex++];
                fileMetadata.SizeFlags = file_SizeFlags;
                fileMetadata.Offset = file_Offset;
                // Prepend the folder name to form the full archive path, then re-hash.
                var path = folder_name + "\\" + fileMetadata.Path;
                fileMetadata.Path = path;
                fileMetadata.PathHash = Tes4HashFilePath(path);
            }
        }
    }
    else if (Magic == MW_BSAHEADER_FILEID)
    {
        // Read the header
        var header_HashOffset = _r.ReadLEUInt32(); // Offset of hash table minus header size (12)
        var header_FileCount = _r.ReadLEUInt32(); // Number of files in the archive
        // Calculate some useful values
        var headerSize = _r.BaseStream.Position;
        var hashTablePosition = headerSize + header_HashOffset;
        var fileDataSectionPostion = hashTablePosition + (8 * header_FileCount); // NOTE(review): identifier typo ("Postion") preserved
        // Create file metadatas
        _files = new FileMetadata[header_FileCount];
        for (var i = 0; i < header_FileCount; i++)
        {
            _files[i] = new FileMetadata
            {
                // Read file sizes/offsets
                SizeFlags = _r.ReadLEUInt32(),
                Offset = fileDataSectionPostion + _r.ReadLEUInt32(),
            }
            // NOTE(review): "} } ;" artifact as in the DX10 branch above — preserved as-is.
        }
        ;
        // Read filename offsets
        var filenameOffsets = new uint[header_FileCount]; // relative offset in filenames section
        for (var i = 0; i < header_FileCount; i++) { filenameOffsets[i] = _r.ReadLEUInt32(); }
        // Read filenames — each is a null-terminated ASCII string at its recorded offset.
        var filenamesSectionStartPos = _r.BaseStream.Position;
        var buf = new List<byte>(64);
        for (var i = 0; i < header_FileCount; i++)
        {
            _r.BaseStream.Position = filenamesSectionStartPos + filenameOffsets[i];
            buf.Clear();
            byte curCharAsByte;
            while ((curCharAsByte = _r.ReadByte()) != 0) { buf.Add(curCharAsByte); }
            _files[i].Path = Encoding.ASCII.GetString(buf.ToArray());
        }
        // Read filename hashes
        _r.BaseStream.Position = hashTablePosition;
        for (var i = 0; i < header_FileCount; i++) { _files[i].PathHash = _r.ReadLEUInt64(); }
    }
    else { throw new InvalidOperationException("BAD MAGIC"); }
    // Create the file metadata hash table
    _filesByHash = _files.ToLookup(x => x.PathHash);
    // Create a virtual directory tree.
    RootDir = new VirtualFileSystem.Directory();
    foreach (var fileMetadata in _files) { RootDir.CreateDescendantFile(fileMetadata.Path); }
}

// Hashes a file path using the hash algorithm matching the loaded archive format:
// Tes3 (Morrowind) hashing for MW archives, Tes4 hashing for everything else.
ulong HashFilePath(string filePath)
{
    if (Magic == MW_BSAHEADER_FILEID) { return (Tes3HashFilePath(filePath)); }
    else { return (Tes4HashFilePath(filePath)); }
}
// Verifies that CheckFileInfoWopiRequest.HandleAsync produces a WOPI-compliant CheckFileInfo
// JSON response built from the repository's file metadata: each WOPI field (BaseFileName,
// Version, OwnerId, ...) must echo the corresponding FileMetadata value, and the repository
// must be called with the same file, version and cancellation token the SUT was given.
public async Task HandleAsync_FormsWOPICompliantResponseUsingFileMetadataAndUserContextAndFeatures(string title, string description, string groupName, string version, string owner, string fileName, string extension, ulong sizeInBytes, string contentHash)
{
    // Arrange: an HttpContext whose DI container resolves a mocked IFileRepository,
    // and whose response body is captured in a MemoryStream for later inspection.
    var cancellationToken = new CancellationToken();
    var services = new ServiceCollection();
    var fileRepository = new Moq.Mock<IFileRepository>();
    var fileRepositoryInvoked = false;
    services.AddScoped(sp => fileRepository.Object);
    var httpContext = new DefaultHttpContext { RequestServices = services.BuildServiceProvider() };
    using var responseBodyStream = new MemoryStream();
    httpContext.Response.Body = responseBodyStream;
    var fileVersion = Guid.NewGuid().ToString();
    var fileMetadata = new FileMetadata(
        title: title, description: description, groupName: groupName, version: version,
        owner: owner, name: fileName, extension: extension, blobName: fileName,
        sizeInBytes: sizeInBytes, lastWriteTime: DateTimeOffset.UtcNow,
        contentHash: contentHash, fileStatus: FileStatus.Verified
        );
    // The callback asserts the SUT forwarded the exact file/version/token it was handed.
    fileRepository.
    Setup(x => x.GetMetadataAsync(Moq.It.IsAny<FutureNHS.WOPIHost.File>(), Moq.It.IsAny<CancellationToken>())).
    Callback((FutureNHS.WOPIHost.File givenFile, CancellationToken givenCancellationToken) =>
    {
        Assert.IsFalse(givenFile.IsEmpty);
        Assert.IsFalse(givenCancellationToken.IsCancellationRequested, "Expected the cancellation token to not be cancelled");
        Assert.AreSame(fileName, givenFile.Name, "Expected the SUT to request the file from the repository whose name it was provided with");
        Assert.AreSame(fileVersion, givenFile.Version, "Expected the SUT to request the file version from the repository that it was provided with");
        Assert.AreEqual(cancellationToken, givenCancellationToken, "Expected the same cancellation token to propagate between service interfaces");
        fileRepositoryInvoked = true;
    }).
    Returns(Task.FromResult(fileMetadata));
    var ephemeralDownloadLink = new Uri("https://www.file-storage.com/files/file_id", UriKind.Absolute);
    fileRepository.Setup(x => x.GeneratePrivateEphemeralDownloadLink(fileMetadata, Moq.It.IsAny<CancellationToken>())).Returns(Task.FromResult(ephemeralDownloadLink));
    var features = new Features();
    var accessToken = Guid.NewGuid().ToString();
    var file = FutureNHS.WOPIHost.File.With(fileName, fileVersion);
    var checkFileInfoWopiRequest = CheckFileInfoWopiRequest.With(file, accessToken, features);
    // Act
    await checkFileInfoWopiRequest.HandleAsync(httpContext, cancellationToken);
    // Assert: repository used, JSON written to our stream, and every WOPI field matches.
    Assert.IsTrue(fileRepositoryInvoked);
    Assert.AreEqual("application/json", httpContext.Response.ContentType);
    Assert.AreSame(responseBodyStream, httpContext.Response.Body);
    responseBodyStream.Position = 0;
    dynamic responseBody = await JsonSerializer.DeserializeAsync<ExpandoObject>(responseBodyStream, cancellationToken: cancellationToken);
    Assert.IsNotNull(responseBody);
    Assert.AreEqual(fileMetadata.Title, ((JsonElement)(responseBody.BaseFileName)).GetString());
    Assert.AreEqual(fileMetadata.Version, ((JsonElement)(responseBody.Version)).GetString());
    Assert.AreEqual(fileMetadata.Owner, ((JsonElement)(responseBody.OwnerId)).GetString());
    Assert.AreEqual(fileMetadata.Extension, ((JsonElement)(responseBody.FileExtension)).GetString());
    Assert.AreEqual(fileMetadata.SizeInBytes, ((JsonElement)(responseBody.Size)).GetUInt64());
    Assert.AreEqual(ephemeralDownloadLink.AbsoluteUri, ((JsonElement)(responseBody.FileUrl)).GetString());
    Assert.AreEqual(fileMetadata.LastWriteTime.ToIso8601(), ((JsonElement)(responseBody.LastModifiedTime)).GetString());
    Assert.AreEqual(FutureNHS.WOPIHost.File.FILENAME_MAXIMUM_LENGTH, ((JsonElement)(responseBody.FileNameMaxLength)).GetInt32());
}
/// <summary>
/// Extracts the metadata from an OpenOffice/ODF document. The document is a zip container:
/// meta.xml, settings.xml, content.xml and VersionList.xml are each extracted and analyzed,
/// EXIF data is harvested from embedded images under "Pictures/", and user names are mined
/// from any stored filesystem paths. Best-effort: any failure is logged to Debug output and
/// whatever was collected so far is returned.
/// </summary>
public override FileMetadata AnalyzeFile()
{
    try
    {
        this.foundMetadata = new FileMetadata();
        using (ZipFile zip = ZipFile.Read(this.fileStream))
        {
            // Document metadata (author, dates, application, ...).
            string strFile = "meta.xml";
            if (zip.EntryFileNames.Contains(strFile))
            {
                using (Stream stmXML = new MemoryStream())
                {
                    zip.Extract(strFile, stmXML);
                    stmXML.Seek(0, SeekOrigin.Begin);
                    AnalizeFileMeta(stmXML);
                }
            }
            // Document settings (printer info, etc. — see analizeFileSettings).
            strFile = "settings.xml";
            if (zip.EntryFileNames.Contains(strFile))
            {
                using (Stream stmXML = new MemoryStream())
                {
                    zip.Extract(strFile, stmXML);
                    stmXML.Seek(0, SeekOrigin.Begin);
                    analizeFileSettings(stmXML);
                }
            }
            // Document body.
            strFile = "content.xml";
            if (zip.EntryFileNames.Contains(strFile))
            {
                using (Stream stmXML = new MemoryStream())
                {
                    zip.Extract(strFile, stmXML);
                    stmXML.Seek(0, SeekOrigin.Begin);
                    AnalizeFileContent(stmXML);
                }
            }
            // Old-version history (needs the zip itself to extract the versions).
            strFile = "VersionList.xml";
            if (zip.EntryFileNames.Contains(strFile))
            {
                using (Stream stmXML = new MemoryStream())
                {
                    zip.Extract(strFile, stmXML);
                    stmXML.Seek(0, SeekOrigin.Begin);
                    AnalizeFileVersionList(stmXML, zip);
                }
            }
            // Extract EXIF information from the images embedded in the document.
            foreach (string strFileName in zip.EntryFileNames)
            {
                string strFileNameLo = strFileName.ToLower();
                // Filter matching *.jpg, *.jpeg and *.png images inside the "Pictures/" folder.
                if (strFileNameLo.StartsWith("pictures/") && (strFileNameLo.EndsWith(".jpg") || strFileNameLo.EndsWith(".jpeg") || strFileNameLo.EndsWith(".png")))
                {
                    using (Stream stmXML = new MemoryStream())
                    {
                        zip.Extract(strFileName, stmXML);
                        stmXML.Seek(0, SeekOrigin.Begin);
                        using (EXIFDocument eDoc = new EXIFDocument(stmXML))
                        {
                            FileMetadata exifMetadata = eDoc.AnalyzeFile();
                            // Record the image along with the EXIF information found in it.
                            this.foundMetadata.EmbeddedImages.Add(System.IO.Path.GetFileName(strFileName), exifMetadata);
                            // Users and applications from the EXIF data are merged into the document's.
                            this.foundMetadata.AddRange(exifMetadata.Users.ToArray());
                            this.foundMetadata.AddRange(exifMetadata.Applications.ToArray());
                        }
                    }
                }
            }
            // Look for user names embedded in the document's stored paths.
            foreach (Diagrams.Path ri in this.foundMetadata.Paths)
            {
                string strUser = PathAnalysis.ExtractUserFromPath(ri.Value);
                if (!string.IsNullOrEmpty(strUser))
                {
                    this.foundMetadata.Add(new User(strUser, ri.IsComputerFolder, "Path: " + ri.Value));
                }
            }
        }
    }
    catch (Exception e)
    {
        // Best-effort analyzer: swallow and log so one bad document does not abort a scan.
        System.Diagnostics.Debug.WriteLine(String.Format("Error analyzing OpenOffice document ({0})", e.ToString()));
    }
    return (this.foundMetadata);
}
// Verifies that GetFileWopiRequest.HandleAsync resolves the named file via the repository,
// streams its bytes to the HTTP response body, and emits the X-WOPI-ItemVersion header.
public async Task HandleAsync_ResolvesAndWritesFileCorrectlyToGivenStream(string fileName)
{
    // Arrange: read the fixture file from disk so we know exactly what bytes to expect.
    var cancellationToken = new CancellationToken();
    var httpContext = new DefaultHttpContext();
    var contentRootPath = Environment.CurrentDirectory;
    var filePath = Path.Combine(contentRootPath, "Files", fileName);
    Assert.IsTrue(System.IO.File.Exists(filePath), $"Expected the {fileName} file to be accessible in the test environment");
    var fileInfo = new FileInfo(filePath);
    var fileBuffer = await System.IO.File.ReadAllBytesAsync(filePath, cancellationToken);
    using var responseBodyStream = new MemoryStream(fileBuffer.Length);
    httpContext.Response.Body = responseBodyStream;
    var fileRepository = new Moq.Mock<IFileRepository>();
    var fileRepositoryInvoked = false;
    var services = new ServiceCollection();
    services.AddScoped(sp => fileRepository.Object);
    httpContext.RequestServices = services.BuildServiceProvider();
    var fileVersion = Guid.NewGuid().ToString();
    using var algo = MD5.Create();
    var contentHash = algo.ComputeHash(fileBuffer);
    var fileMetadata = new FileMetadata("title", "description", "group-name", fileVersion, "owner", fileName, fileInfo.Extension, (ulong)fileInfo.Length, "blobName", DateTimeOffset.UtcNow, Convert.ToBase64String(contentHash), FileStatus.Verified);
    var fileWriteDetails = new FileWriteDetails(fileVersion, "content-type", contentHash, (ulong)fileBuffer.Length, "content-encoding", "content-language", DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, fileMetadata);
    // The callback both asserts the forwarded arguments and simulates the repository
    // writing the file's bytes into the stream it was given.
    // NOTE(review): an async lambda passed to Moq's Callback is async-void; exceptions thrown
    // inside it (including failed Asserts) may not surface in this test — confirm.
    fileRepository.
    Setup(x => x.WriteToStreamAsync(Moq.It.IsAny<FileMetadata>(), Moq.It.IsAny<Stream>(), Moq.It.IsAny<CancellationToken>())).
    Callback(async (FileMetadata givenFileMetadata, Stream givenStream, CancellationToken givenCancellationToken) =>
    {
        Assert.IsFalse(givenFileMetadata.IsEmpty);
        Assert.IsNotNull(givenStream);
        Assert.IsFalse(givenCancellationToken.IsCancellationRequested, "Expected the cancellation token to not be cancelled");
        Assert.AreSame(responseBodyStream, givenStream, "Expected the SUT to as the repository to write the file to the stream it was asked to");
        Assert.AreSame(fileName, givenFileMetadata.Name, "Expected the SUT to request the file from the repository whose name it was provided with");
        Assert.AreSame(fileVersion, givenFileMetadata.Version, "Expected the SUT to request the file version from the repository that it was provided with");
        Assert.AreEqual(cancellationToken, givenCancellationToken, "Expected the same cancellation token to propagate between service interfaces");
        await givenStream.WriteAsync(fileBuffer, cancellationToken);
        await givenStream.FlushAsync(cancellationToken);
        fileRepositoryInvoked = true;
    }).
    Returns(Task.FromResult(fileWriteDetails));
    fileRepository.Setup(x => x.GetMetadataAsync(Moq.It.IsAny<File>(), Moq.It.IsAny<CancellationToken>())).Returns(Task.FromResult(fileMetadata));
    var accessToken = Guid.NewGuid().ToString();
    var file = File.With(fileName, fileVersion);
    var getFileWopiRequest = GetFileWopiRequest.With(file, accessToken);
    // Act
    await getFileWopiRequest.HandleAsync(httpContext, cancellationToken);
    // Assert
    Assert.IsTrue(fileRepositoryInvoked, "Expected the SUT to defer to the file repository with the correct parameters");
    Assert.AreEqual(fileBuffer.Length, responseBodyStream.Length, "All bytes in the file should be written to the target stream");
    Assert.IsTrue(httpContext.Response.Headers.ContainsKey("X-WOPI-ItemVersion"), "Expected the X-WOPI-ItemVersion header to have been written to the response");
    Assert.IsNotNull(httpContext.Response.Headers["X-WOPI-ItemVersion"], "Expected the X-WOPI-ItemVersion header in the response to not be null");
}
// Refreshes the lyrics shown for the given track: first tries the audio file's embedded
// lyrics, then a sibling .lrc file, and finally (if enabled in settings) an online download.
// async void: this is a fire-and-forget UI refresh handler; exceptions are handled internally.
//
// BUG FIX: the editing-mode and null-metadata checks used to live inside a Task.Run lambda,
// so their `return` statements only exited the lambda — the method then carried on and (for
// null metadata) crashed on fmd.Path, relying on the catch block to clean up. The guards now
// return from the method itself, which is what the original comments intended.
private async void RefreshLyricsAsync(PlayableTrack track)
{
    if (track == null)
    {
        return;
    }
    if (this.previousTrack != null && this.previousTrack.Equals(track))
    {
        return;
    }
    this.previousTrack = track;
    this.StopHighlighting();
    FileMetadata fmd = await this.metadataService.GetFileMetadataAsync(track.Path);
    // If we're in editing mode, delay changing the lyrics.
    if (this.LyricsViewModel != null && this.LyricsViewModel.IsEditing)
    {
        this.updateLyricsAfterEditingTimer.Start();
        return;
    }
    // No FileMetadata available: clear the lyrics.
    if (fmd == null)
    {
        this.ClearLyrics();
        return;
    }
    try
    {
        Lyrics lyrics = null;
        bool mustDownloadLyrics = false;
        await Task.Run(async () =>
        {
            // Try to get lyrics from the audio file
            lyrics = new Lyrics(fmd.Lyrics.Value != null ? fmd.Lyrics.Value : String.Empty, string.Empty);
            lyrics.SourceType = SourceTypeEnum.Audio;
            // If the audio file has no lyrics, try to find lyrics in a local lyrics file.
            if (!lyrics.HasText)
            {
                var lrcFile = Path.Combine(Path.GetDirectoryName(fmd.Path), Path.GetFileNameWithoutExtension(fmd.Path) + FileFormats.LRC);
                if (File.Exists(lrcFile))
                {
                    using (var fs = new FileStream(lrcFile, FileMode.Open, FileAccess.Read))
                    {
                        using (var sr = new StreamReader(fs, Encoding.Default))
                        {
                            lyrics = new Lyrics(await sr.ReadToEndAsync(), String.Empty);
                            if (lyrics.HasText)
                            {
                                lyrics.SourceType = SourceTypeEnum.Lrc;
                                return;
                            }
                        }
                    }
                }
                // If we still don't have lyrics and the user enabled automatic download of lyrics: try to download them online.
                if (SettingsClient.Get<bool>("Lyrics", "DownloadLyrics"))
                {
                    string artist = fmd.Artists != null && fmd.Artists.Values != null && fmd.Artists.Values.Length > 0 ? fmd.Artists.Values[0] : string.Empty;
                    string title = fmd.Title != null && fmd.Title.Value != null ? fmd.Title.Value : string.Empty;
                    // Was a non-short-circuiting '&'; '&&' is equivalent here and idiomatic.
                    if (!string.IsNullOrWhiteSpace(artist) && !string.IsNullOrWhiteSpace(title))
                    {
                        mustDownloadLyrics = true;
                    }
                }
            }
        });
        // No lyrics were found in the file: try to download.
        if (mustDownloadLyrics)
        {
            this.IsDownloadingLyrics = true;
            try
            {
                var factory = new LyricsFactory(SettingsClient.Get<int>("Lyrics", "TimeoutSeconds"), SettingsClient.Get<string>("Lyrics", "Providers"), this.info);
                lyrics = await factory.GetLyricsAsync(fmd.Artists.Values[0], fmd.Title.Value);
                lyrics.SourceType = SourceTypeEnum.Online;
            }
            catch (Exception ex)
            {
                LogClient.Error("Could not get lyrics online {0}. Exception: {1}", track.Path, ex.Message);
            }
            this.IsDownloadingLyrics = false;
        }
        await Task.Run(() =>
        {
            this.LyricsViewModel = new LyricsViewModel(container, track);
            this.LyricsViewModel.SetLyrics(lyrics);
        });
    }
    catch (Exception ex)
    {
        this.IsDownloadingLyrics = false;
        LogClient.Error("Could not show lyrics for Track {0}. Exception: {1}", track.Path, ex.Message);
        this.ClearLyrics();
        return;
    }
    this.StartHighlighting();
}
/// <summary>
/// Add metadata to the set, keyed by its calculated version. An entry with the same
/// version is overwritten. Debug builds assert that the metadata belongs to the same
/// canonical filename as the rest of the set.
/// </summary>
public void Add(FileMetadata metadata)
{
    bool belongsToThisSet =
        filenameCanonical == null ||
        metadata.filenameCanonical.Equals(filenameCanonical);
    System.Diagnostics.Debug.Assert(belongsToThisSet);

    var version = metadata.CalculateVersion();
    metadataByVersion[version] = metadata;
}
/// <summary>
/// Downloads the named file from Dropbox into the current directory, writing it in
/// CHUNK_SIZE pieces. Returns the local file name on success, or null on any failure
/// (the error is sent to the remote logger and summarized on the console).
/// </summary>
/// <param name="path">Dropbox path of the file, without the leading '/'.</param>
/// <param name="folderName">Unused; kept for interface compatibility.</param>
private static async Task<string> downloadZipFile(string path, string folderName)
{
    FileStream archivo = null;
    try
    {
        clientConf = new DropboxClientConfig("ScandaV1");
        client = new DropboxClient(APITOKEN);
        path = "/" + path;
        var x = await client.Files.DownloadAsync(path);
        FileMetadata metadata = x.Response;
        // Create (or truncate) the destination file up front.
        archivo = File.Create(metadata.Name);
        archivo.Close();
        // BUG FIX: 'stream' was referenced below but its declaration had been commented
        // out, so this method did not compile. The content stream is now declared and
        // disposed deterministically.
        using (Stream stream = await x.GetContentAsStreamAsync())
        {
            byte[] buff = new byte[CHUNK_SIZE];
            int read;
            while (0 < (read = stream.Read(buff, 0, CHUNK_SIZE)))
            {
                // Append each chunk; reopening per chunk keeps the original flush behavior.
                using (var appedS = new FileStream(metadata.Name, FileMode.Append))
                {
                    appedS.Write(buff, 0, read);
                }
            }
        }
        return (metadata.Name);
    }
    catch (OutOfMemoryException ex)
    {
        await Logger.sendLog(string.Format("{0} | {1} | {2}", ex.Message, ex.StackTrace, "Scanda.AppTray.ScandaConector.downloadZipFile"), "E");
        Console.WriteLine("Se acabo la memoria");
        return (null);
    }
    catch (FileNotFoundException ex)
    {
        await Logger.sendLog(string.Format("{0} | {1} | {2}", ex.Message, ex.StackTrace, "Scanda.AppTray.ScandaConector.downloadZipFile"), "E");
        Console.WriteLine("No existe el archivo");
        return (null);
    }
    catch (AggregateException ex) // In-flight task exceptions
    {
        await Logger.sendLog(string.Format("{0} | {1} | {2}", ex.Message, ex.StackTrace, "Scanda.AppTray.ScandaConector.downloadZipFile"), "E");
        Console.WriteLine("Tarea Cancelada");
        return (null);
    }
    catch (Exception ex)
    {
        await Logger.sendLog(string.Format("{0} | {1} | {2}", ex.Message, ex.StackTrace, "Scanda.AppTray.ScandaConector.downloadZipFile"), "E");
        Console.WriteLine("Exepcion general ");
        Console.WriteLine(ex);
        return (null);
    }
    finally
    {
        if (archivo != null)
        {
            archivo.Close();
            archivo.Dispose();
        }
    }
}
/// <summary>
/// Parse current and obsolete file references from a package's
/// manifest files.
/// </summary>
/// <param name="metadataByVersion">Metadata for files ordered by
/// version number. If the metadata does not have the isManifest
/// attribute it is ignored.</param>
/// <param name="metadataSet">Set of all metadata files in the
/// project. This is used to handle file renaming in the parsed
/// manifest. If the manifest contains files that have been
/// renamed it's updated with the new filenames.</param>
/// <returns>true if data was parsed from the specified file metadata,
/// false otherwise.</returns>
public bool ParseManifests(FileMetadataByVersion metadataByVersion,
                           FileMetadataSet metadataSet) {
    currentFiles = new HashSet<string>();
    obsoleteFiles = new HashSet<string>();

    int versionIndex = 0;
    int numberOfVersions = metadataByVersion.Values.Count;
    foreach (FileMetadata metadata in metadataByVersion.Values) {
        versionIndex++;
        if (!metadata.isManifest) return false;
        bool manifestNeedsUpdate = false;
        // Files from every version but the newest are candidates for deletion.
        HashSet<string> filesInManifest =
            versionIndex < numberOfVersions ? obsoleteFiles : currentFiles;
        // FIX: the reader/writer used to be closed manually and leaked on exception;
        // they are now scoped with using statements.
        using (var manifestFile = new StreamReader(metadata.filename)) {
            string line;
            while ((line = manifestFile.ReadLine()) != null) {
                var manifestFileMetadata = new FileMetadata(line.Trim());
                string filename = manifestFileMetadata.filename;
                // Check for a renamed file.
                var existingFileMetadata = metadataSet.FindMetadata(
                    manifestFileMetadata.filenameCanonical,
                    manifestFileMetadata.CalculateVersion());
                if (existingFileMetadata != null &&
                    !manifestFileMetadata.filename.Equals(
                        existingFileMetadata.filename)) {
                    filename = existingFileMetadata.filename;
                    manifestNeedsUpdate = true;
                }
                filesInManifest.Add(filename);
            }
        }
        // If this is the most recent manifest version, remove all
        // current files from the set to delete.
        if (versionIndex == numberOfVersions) {
            currentMetadata = metadata;
            foreach (var currentFile in filesInManifest) {
                obsoleteFiles.Remove(currentFile);
            }
        }
        // Rewrite the manifest to track renamed files.
        if (manifestNeedsUpdate) {
            File.Delete(metadata.filename);
            using (var writer = new StreamWriter(metadata.filename)) {
                foreach (var filename in filesInManifest) {
                    writer.WriteLine(filename);
                }
            }
        }
    }
    this.filenameCanonical = metadataByVersion.filenameCanonical;
    return true;
}
// Wraps a single file-metadata record for display as a table row.
public TableEntry(FileMetadata metadata) => this.metadata = metadata;
// Builds the metadata list for every blob belonging to the logged-in user: blobs whose
// URI contains the user's folder prefix are projected to FileMetadata (name, content
// type, URI, owner email, size in KB); all other blobs are ignored.
private IList<FileMetadata> GetFileMetadataForCommonUser()
{
    var userPrefix = _files.Uri + "/" + _loggedUserId + "/";
    var ownerEmail = GetUserEmailByUserId(_loggedUserId);
    var result = new List<FileMetadata>();

    foreach (var blob in _files.GetBlobs())
    {
        var blobUri = blob.Uri.ToString();
        if (!blobUri.Contains(userPrefix))
        {
            continue;
        }

        var name = blobUri.Substring(userPrefix.Length);
        result.Add(new FileMetadata(
            name,
            Shared.GetContentType(name),
            blobUri,
            ownerEmail,
            ConvertBytesToKilobytes(blob.Properties.Length)));
    }

    return result;
}
// Background loop that drains queued file-published notifications, counts them per group,
// and for each group the user has permissions in, downloads / decrypts / decompresses the
// newly published files, verifying each against its MD5 hash. Polls every 2 seconds until
// the cancellation token fires. Any unhandled exception terminates the loop (logged once).
static async Task NotificationHandler()
{
    try
    {
        var token = notificationCancellation.Token;
        var fpns = new List<FilePublishedNotification>();
        while (token.IsCancellationRequested == false)
        {
            // Drain everything currently queued into a local batch.
            fpns.Clear();
            while (filesPublishedNotifications.IsEmpty == false)
            {
                if (filesPublishedNotifications.TryDequeue(out FilePublishedNotification fn))
                {
                    fpns.Add(fn);
                }
                else
                {
                    break;
                }
            }
            // Count notifications per group; the count caps how many files we fetch below.
            var groupCounts = new Dictionary<string, int>();
            foreach (var fpn in fpns)
            {
                if (groupCounts.TryGetValue(fpn.GroupId, out int count) == false)
                {
                    groupCounts[fpn.GroupId] = 0;
                }
                groupCounts[fpn.GroupId] = groupCounts[fpn.GroupId] + 1;
            }
            foreach (var groupId in groupCounts.Keys)
            {
                if (token.IsCancellationRequested)
                {
                    return;
                }
                var maxCount = groupCounts[groupId];
                UserGroupPermissions permissions = null;
                // Skip groups we have no cached permissions for.
                if (groupPermissionLookup.TryGetValue(groupId, out permissions) == false)
                {
                    continue;
                }
                var filesInfo = await client.GetDownloadableFilesInfoAsync(groupId, permissions.GroupOrganizerUserId, maxCount);
                foreach (var fi in filesInfo)
                {
                    // NOTE: if file is too large to download within the allowed timeout, might want to skip it
                    // if (fi.FileSize > MaxDownloadFileSize)
                    //     continue;
                    // download, decrypt, and uncompress file
                    Console.WriteLine("-------------------------------------");
                    Console.WriteLine($"Downloading {fi.FileName}, Decrypted Size {fi.FileSize} bytes");
                    await client.DownloadFileAsync(fi.FileId, fi.GroupId, async (downloadCipherTextStream) =>
                    {
                        string directory = null;
                        try
                        {
                            // Ensure both the raw-download and decrypted-output folders exist.
                            directory = Path.Combine(downloadDirectory, fi.GroupId, fi.PublisherUserId);
                            if (Directory.Exists(directory) == false)
                            {
                                Directory.CreateDirectory(directory);
                            }
                            directory = Path.Combine(decryptedDirectory, $"{fi.OrganizerEmail} - {fi.GroupName}", fi.PublisherEmail);
                            if (Directory.Exists(directory) == false)
                            {
                                Directory.CreateDirectory(directory);
                            }
                        }
                        catch (Exception dx)
                        {
                            Console.WriteLine($"Error creating directories. Exception: {dx.Message}");
                            return;
                        }
                        try
                        {
                            // Final plain-text path plus two scratch files for the cipher text
                            // and the decrypted-but-still-compressed intermediate.
                            var plainTextPath = Path.Combine(directory, $"{fi.FileName}-{fi.FileId}{Path.GetExtension(fi.FileName)}");
                            var cipherTextPath = Path.Combine(workingDirectory, $"{fi.FileId}.brx");
                            var uncompressPath = Path.Combine(workingDirectory, $"{fi.FileId}.bru");
                            FileMetadata metadata = null;
                            byte[] md5 = null;
                            using (var plainTextStream = File.Create(plainTextPath))
                            using (var cipherTextStream = File.Create(cipherTextPath))
                            using (var uncompressStream = File.Create(uncompressPath))
                            {
                                // download
                                await downloadCipherTextStream.CopyToAsync(cipherTextStream);
                                // decrypt and uncompress
                                metadata = await client.UnprotectGroupFileAsync(cipherTextStream, uncompressStream, plainTextStream);
                                plainTextStream.Position = 0;
                                md5 = Client.ComputeMD5Hash(plainTextStream);
                            }
                            // Remove the scratch files; only the plain text is kept.
                            File.Delete(uncompressPath);
                            File.Delete(cipherTextPath);
                            // Integrity check against the hash carried in the file metadata.
                            if (md5.SequenceEqual(metadata.MD5Hash) == false)
                            {
                                Console.WriteLine($"WARNING: Possible corruption or tampering with file {plainTextPath} for group {fi.GroupId}");
                            }
                            else
                            {
                                Console.WriteLine($"Saved decrypted file {fi.FileName} to {plainTextPath}");
                                Console.WriteLine();
                            }
                        }
                        catch (Exception x)
                        {
                            Console.WriteLine($"Error downloading file {fi.FileId} for group {fi.GroupId}. Exception: {x.Message}");
                        }
                    });
                }
            }
            // Poll interval between notification sweeps.
            await Task.Delay(2000);
        }
    }
    catch (Exception nx)
    {
        Console.WriteLine($"Notification handler exception: {nx.Message} stack: {nx.StackTrace}");
    }
}
// Adapts a FileMetadata instance for JSON serialization.
public JsonMetadataAdapter(FileMetadata fileMetadata) => this.fileMetadata = fileMetadata;
// Translates the virtual path to its physical counterpart, then delegates the
// metadata update to the base implementation.
protected override Task SetFileMetadataImplAsync(string path, FileMetadata metadata, CancellationToken cancel = default)
{
    string physicalPath = MapPathVirtualToPhysical(path);
    return base.SetFileMetadataImplAsync(physicalPath, metadata, cancel);
}
/// <summary>
/// Downloads a file and prints its name, revision, and content to the console.
/// </summary>
/// <remarks>This demonstrates calling a download style api in the Files namespace.</remarks>
/// <param name="folder">The folder path in which the file should be found.</param>
/// <param name="file">The file to download within <paramref name="folder"/>.</param>
/// <returns></returns>
private async Task Download(string folder, FileMetadata file)
{
    Console.WriteLine("Download file...");

    using var downloadResponse = await this.client.Files.DownloadAsync(folder + "/" + file.Name);

    Console.WriteLine("Downloaded {0} Rev {1}", downloadResponse.Response.Name, downloadResponse.Response.Rev);
    Console.WriteLine("------------------------------");
    Console.WriteLine(await downloadResponse.GetContentAsStringAsync());
    Console.WriteLine("------------------------------");
}
// Captures the metadata of a successfully deleted Dropbox file.
public DropboxDeleteSuccessResult(FileMetadata metadata) => _metaData = metadata;
// Loads a single file through the given processor, after overlaying any matching
// file-level metadata on top of the global metadata. All logging for the load is
// scoped to the file being processed.
private static FileModel Load(
    IDocumentProcessor processor,
    ImmutableDictionary<string, object> metadata,
    FileMetadata fileMetadata,
    FileAndType file)
{
    using (new LoggerFileScope(file.File))
    {
        Logger.LogVerbose($"Plug-in {processor.Name}: Loading...");

        var fullPath = Path.Combine(file.BaseDir, file.File);
        var effectiveMetadata = ApplyFileMetadata(fullPath, metadata, fileMetadata);
        return processor.Load(file, effectiveMetadata);
    }
}
/// <summary>
/// Builds an Article from Dropbox file metadata: the file name supplies both the
/// article name and (together with the revision) its parsed metadata.
/// </summary>
public static Article FromMetadata(FileMetadata metadata, HtmlString content)
{
    // FIX: a `metadata.Name.ParseBlogFileName()` result was computed here but never
    // used; the dead call has been removed (assumed side-effect free — confirm if
    // it was intended as a validation step).
    return new Article(
        metadata.Name,
        ArticleMetadata.Parse(metadata.Name, metadata.Rev),
        content);
}
/// <summary>
/// Returns a File that represents the current state of the file that this FileEntry represents.
/// Deserializes the file options from JSON, reads the file's metadata, and dispatches it to
/// the JavaScript side; each failure mode maps to a distinct plugin error code.
/// </summary>
/// <param name="options">JSON-serialized FileOptions containing the file's FullPath</param>
/// <returns></returns>
public void getFileMetadata(string options)
{
    try
    {
        try
        {
            fileOptions = JSON.JsonHelper.Deserialize<FileOptions>(options);
        }
        catch (Exception) // FIX: exception variables were declared but unused (CS0168)
        {
            DispatchCommandResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION));
            return;
        }
        FileMetadata metaData = new FileMetadata(fileOptions.FullPath);
        // "_castDate" re-hydrates the serialized dates on the JS side.
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK, metaData, "window.localFileSystem._castDate"));
    }
    catch (SecurityException)
    {
        DispatchCommandResult(new PluginResult(PluginResult.Status.ERROR, new ErrorCode(SECURITY_ERR)));
    }
    catch (IsolatedStorageException)
    {
        DispatchCommandResult(new PluginResult(PluginResult.Status.ERROR, new ErrorCode(NOT_READABLE_ERR)));
    }
    catch (FileNotFoundException)
    {
        DispatchCommandResult(new PluginResult(PluginResult.Status.ERROR, new ErrorCode(NOT_FOUND_ERR)));
    }
}
/// <summary>
/// Writes the file from cloud storage (specified by <paramref name="blobInfo"/>) to local disk.
/// </summary>
/// <param name="filename">The local file name to write.</param>
/// <param name="blobInfo">Cloud blob metadata describing the file.</param>
/// <returns>Always <c>false</c>; the operation is not implemented yet.</returns>
public bool UpdateFile(string filename, FileMetadata blobInfo)
{
    // Not implemented: would write the file from cloud (specified by BlobInfo) to local disk.
    return false;
}
/// <summary>
/// Runs <paramref name="expression"/> (case-insensitively) against the regex input
/// derived from <paramref name="input"/>, starting at <paramref name="offset"/>.
/// </summary>
/// <param name="input">The file metadata to build the regex input from.</param>
/// <param name="expression">The regular-expression pattern to match.</param>
/// <param name="offset">The position in the input at which to start matching.</param>
/// <returns>The match, or <see cref="Match.Empty"/> when the offset is out of range.</returns>
private Match FindMatch(FileMetadata input, string expression, int offset)
{
    var text = GetRegexInput(input);

    // Guard: an offset outside the input yields an empty match rather than throwing.
    if (offset < 0 || offset >= text.Length)
    {
        return Match.Empty;
    }

    return _iRegexProvider
        .GetRegex(expression, RegexOptions.IgnoreCase)
        .Match(text, offset);
}
/// <summary>
/// Gets the first album artist from the metadata, or the default "unknown album artist"
/// string when no album artist is present.
/// </summary>
/// <param name="iFileMetadata">The file metadata to read the album artists from.</param>
/// <returns>The first non-empty, sanitized album artist, or the default string.</returns>
public static string GetFirstAlbumArtist(FileMetadata iFileMetadata)
{
    if (string.IsNullOrWhiteSpace(iFileMetadata.AlbumArtists.Value))
    {
        return Defaults.UnknownAlbumArtistString;
    }

    var patchedArtists = MetadataUtils.PatchID3v23Enumeration(iFileMetadata.AlbumArtists.Values);
    var firstArtist = patchedArtists.FirstNonEmpty(Defaults.UnknownAlbumArtistString);
    return MetadataUtils.SanitizeTag(firstArtist);
}
/// <summary>
/// Returns a File that represents the current state of the file that this FileEntry represents.
/// </summary>
/// <param name="options">Serialized options: [0] the file path, [1] the callback id.</param>
/// <returns></returns>
public void getFileMetadata(string options)
{
    string[] args = getOptionStrings(options);
    string filePath = args[0];
    string callbackId = args[1];

    // A missing path is silently ignored — no result is dispatched.
    if (filePath == null)
    {
        return;
    }

    try
    {
        FileMetadata metaData = new FileMetadata(filePath);
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK, metaData), callbackId);
    }
    catch (IsolatedStorageException)
    {
        DispatchCommandResult(new PluginResult(PluginResult.Status.ERROR, NOT_READABLE_ERR), callbackId);
    }
    catch (Exception ex)
    {
        // Let the shared exception handler run first; fall back to a generic read error.
        if (!this.HandleException(ex))
        {
            DispatchCommandResult(new PluginResult(PluginResult.Status.ERROR, NOT_READABLE_ERR), callbackId);
        }
    }
}
/// <summary>
/// Builds updated metadata for every edited track and asks the metadata service to save it.
/// Only fields whose value was actually changed in the editor are copied into the new
/// metadata, so untouched tags keep their per-track values.
/// </summary>
/// <returns>
/// <c>false</c> when validation fails before saving; otherwise <c>true</c> — even if
/// building an individual track's metadata failed, since failures are only logged.
/// </returns>
public async Task <bool> SaveTracksAsync()
{
    // Abort early when any edited entry is invalid; nothing is written.
    if (!this.AllEntriesValid())
    {
        return(false);
    }

    List <FileMetadata> fmdList = new List <FileMetadata>();

    this.IsBusy = true;

    // Creating FileMetadata reads the audio files, so build the list off the calling thread.
    await Task.Run(() =>
    {
        try
        {
            foreach (TrackInfo ti in this.trackInfos)
            {
                var fmd = new FileMetadata(ti.Path);

                // Copy only the fields the user changed in the editor.
                if (this.artists.IsValueChanged) { fmd.Artists = this.artists; }
                if (this.title.IsValueChanged) { fmd.Title = this.title; }
                if (this.album.IsValueChanged) { fmd.Album = this.album; }
                if (this.albumArtists.IsValueChanged) { fmd.AlbumArtists = this.albumArtists; }
                if (this.year.IsValueChanged) { fmd.Year = this.year; }
                if (this.trackNumber.IsValueChanged) { fmd.TrackNumber = this.trackNumber; }
                if (this.trackCount.IsValueChanged) { fmd.TrackCount = this.trackCount; }
                if (this.discNumber.IsValueChanged) { fmd.DiscNumber = this.discNumber; }
                if (this.discCount.IsValueChanged) { fmd.DiscCount = this.discCount; }
                if (this.genres.IsValueChanged) { fmd.Genres = this.genres; }
                if (this.grouping.IsValueChanged) { fmd.Grouping = this.grouping; }
                if (this.comment.IsValueChanged) { fmd.Comment = this.comment; }
                if (this.artwork.IsValueChanged) { fmd.ArtworkData = this.artwork; }

                fmdList.Add(fmd);
            }
        }
        catch (Exception ex)
        {
            // NOTE(review): a failure mid-loop abandons the remaining tracks but still
            // saves whatever was collected so far, and the method still returns true —
            // confirm this best-effort behavior is intentional.
            LogClient.Instance.Logger.Error("An error occured while setting the metadata. Exception: {0}", ex.Message);
        }
    });

    // Persist only when at least one track was prepared successfully.
    if (fmdList.Count > 0)
    {
        await this.metadataService.UpdateTrackAsync(fmdList, this.UpdateAlbumArtwork);
    }

    this.IsBusy = false;

    return(true);
}
/// <summary>
/// Get the cached copy of a file's metadata by index. The metadata will be for the last retrieved or successfully saved version, and will not include any local changes that have not been
/// committed by calling SaveFile. The returned pointer must be released by the user when no longer needed.
/// <seealso cref="GetFileMetadataCount" />
/// <seealso cref="Release" />
/// </summary>
/// <param name="copyFileMetadataOptions">Object containing properties related to which user is requesting metadata, and at what index</param>
/// <param name="outMetadata">A copy of the FileMetadata structure will be set if successful. This data must be released by calling <see cref="Release" />.</param>
/// <returns>
/// <see cref="Result" />::<see cref="Result.Success" /> if the requested metadata is currently cached, otherwise an error result explaining what went wrong
/// </returns>
public Result CopyFileMetadataAtIndex(CopyFileMetadataAtIndexOptions copyFileMetadataOptions, out FileMetadata outMetadata)
{
    // Marshal the managed options into an unmanaged structure for the native call.
    System.IntPtr copyFileMetadataOptionsAddress = new System.IntPtr();
    Helper.TryMarshalSet <CopyFileMetadataAtIndexOptionsInternal, CopyFileMetadataAtIndexOptions>(ref copyFileMetadataOptionsAddress, copyFileMetadataOptions);

    var outMetadataAddress = System.IntPtr.Zero;
    var funcResult = EOS_PlayerDataStorage_CopyFileMetadataAtIndex(InnerHandle, copyFileMetadataOptionsAddress, ref outMetadataAddress);

    // The options buffer is owned by this method and is freed regardless of the call result.
    Helper.TryMarshalDispose(ref copyFileMetadataOptionsAddress);

    // Copy the native metadata into the managed out-parameter; once copied, the
    // native buffer is released via the SDK's release function.
    if (Helper.TryMarshalGet <FileMetadataInternal, FileMetadata>(outMetadataAddress, out outMetadata))
    {
        EOS_PlayerDataStorage_FileMetadata_Release(outMetadataAddress);
    }

    return(funcResult);
}
/// <summary>
/// Deletes the file described by <paramref name="aFileMetadata"/>.
/// </summary>
/// <param name="aFileMetadata">Metadata identifying the file to delete.</param>
/// <returns>Always <c>false</c>; deletion is not implemented yet.</returns>
public bool DeleteFile(FileMetadata aFileMetadata) => false;
/// <summary>
/// Returns a File that represents the current state of the file that this FileEntry represents.
/// </summary>
/// <param name="options">Serialized options containing the single file path argument.</param>
/// <returns></returns>
public void getFileMetadata(string options)
{
    string filePath = getSingleStringOption(options);

    // A missing path is silently ignored — no result is dispatched.
    if (filePath == null)
    {
        return;
    }

    try
    {
        FileMetadata metaData = new FileMetadata(filePath);
        DispatchCommandResult(new PluginResult(PluginResult.Status.OK, metaData));
    }
    catch (IsolatedStorageException)
    {
        DispatchCommandResult(new PluginResult(PluginResult.Status.ERROR, NOT_READABLE_ERR));
    }
    catch (Exception ex)
    {
        // Let the shared exception handler run first; fall back to a generic read error.
        if (!this.HandleException(ex))
        {
            DispatchCommandResult(new PluginResult(PluginResult.Status.ERROR, NOT_READABLE_ERR));
        }
    }
}
/// <summary>
/// Gets the first genre from the metadata, or the default "unknown genre" string
/// when no genre is present.
/// </summary>
/// <param name="fmd">The file metadata to read the genres from.</param>
/// <returns>The first non-empty genre, or the default string.</returns>
public static string GetFirstGenre(FileMetadata fmd)
{
    if (string.IsNullOrWhiteSpace(fmd.Genres.Value))
    {
        return Defaults.UnknownGenreString;
    }

    // NOTE(review): unlike GetFirstAlbumArtist, the result is not passed through
    // MetadataUtils.SanitizeTag — confirm whether that asymmetry is intentional.
    return MetadataUtils.PatchID3v23Enumeration(fmd.Genres.Values)
        .FirstNonEmpty(Defaults.UnknownGenreString);
}