public string NewMediaObject(FileData mediaObject) { var mediaFile = new WorkflowMediaFile { FileName = mediaObject.name, Title = mediaObject.name, Culture = DataLocalizationFacade.DefaultLocalizationCulture.Name, Length = mediaObject.bits.Count(), MimeType = MimeTypeInfo.GetCanonical(mediaObject.type) }; if (mediaFile.MimeType == MimeTypeInfo.Default) { mediaFile.MimeType = MimeTypeInfo.GetCanonicalFromExtension(Path.GetExtension(mediaFile.FileName)); } using (Stream readStream = new MemoryStream(mediaObject.bits)) { using (Stream writeStream = mediaFile.GetNewWriteStream()) { readStream.CopyTo(writeStream); } } string folderPath = string.Format("/Blog/{0}/{1:yyyy-MM-dd}", Author.Name, DateTime.Now); mediaFile.FolderPath = ForceGetMediaFolderPath(folderPath); var addedFile = DataFacade.AddNew<IMediaFile>(mediaFile); return MediaUrlHelper.GetUrl(addedFile); }
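The snippet above falls back to guessing the MIME type from the file extension when the canonical lookup fails. A minimal sketch of that kind of extension lookup using only the BCL; the class name and the mapping table are illustrative assumptions, not the MimeTypeInfo implementation:

using System;
using System.Collections.Generic;
using System.IO;

static class MimeGuess
{
    // Illustrative extension-to-MIME table; real lookups usually cover far more types.
    static readonly Dictionary<string, string> Map = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
    {
        [".jpg"] = "image/jpeg",
        [".png"] = "image/png",
        [".gif"] = "image/gif",
        [".pdf"] = "application/pdf"
    };

    public static string FromFileName(string fileName, string fallback = "application/octet-stream")
    {
        string ext = Path.GetExtension(fileName);
        return Map.TryGetValue(ext, out var mime) ? mime : fallback;
    }
}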
public Block(StorageOptions storageOptions, ReadOptions readOptions, BlockHandle handle, FileData fileData)
{
    try
    {
        _handle = handle;
        _storageOptions = storageOptions;
        _fileData = fileData;

        if (handle.Position > fileData.Size || (handle.Position + handle.Count + BlockTrailerSize) > fileData.Size)
            throw new CorruptedDataException("The specified accessor is beyond the bounds of the provided mappedFile");

        _accessor = _fileData.File.CreateAccessor(handle.Position, handle.Count + BlockTrailerSize);

        if (readOptions.VerifyChecksums)
        {
            var crc = Crc.Unmask(_accessor.ReadInt32(handle.Count + 1));
            var actualCrc = CalculateActualCrc(handle.Count + 1); // data + tag
            if (crc != actualCrc)
                throw new CorruptedDataException("block checksum mismatch");
        }

        RestartsCount = _accessor.ReadInt32(handle.Count - sizeof(int));
        RestartsOffset = handle.Count - (RestartsCount * sizeof(int)) - sizeof(int);

        if (RestartsOffset > handle.Count)
            throw new CorruptedDataException("restart offset wrapped around");
    }
    catch (Exception)
    {
        Dispose();
        throw;
    }
}
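Crc.Unmask is not shown here. If this block reader follows the LevelDB CRC-masking convention (an assumption, not confirmed by the snippet), the masking it reverses looks roughly like this:

static class CrcMask
{
    // LevelDB-style CRC masking; the constant and rotation are assumptions based on that format.
    const uint MaskDelta = 0xa282ead8u;

    public static uint Mask(uint crc) => ((crc >> 15) | (crc << 17)) + MaskDelta;

    public static uint Unmask(uint masked)
    {
        uint rot = masked - MaskDelta;
        return (rot >> 17) | (rot << 15);
    }
}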
public bool UploadFileData(FileData fileData)
{
    bool result = false;
    try
    {
        string filePath = Path.Combine(ConfigurationManager.AppSettings["PATH"], fileData.FileName);
        if (fileData.FilePosition == 0)
        {
            CreateDirectoryIfNotExists(filePath);
            File.Create(filePath).Close();
        }

        using (FileStream fileStream = new FileStream(filePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read))
        {
            fileStream.Seek(fileData.FilePosition, SeekOrigin.Begin);
            fileStream.Write(fileData.BufferData, 0, fileData.BufferData.Length);
        }

        // Report success only after the chunk has actually been written;
        // the original left result at false on every path.
        result = true;
    }
    catch (Exception ex)
    {
        ErrorDetails ed = new ErrorDetails();
        ed.ErrorCode = 1001;
        ed.ErrorMessage = ex.Message;
        throw new FaultException<ErrorDetails>(ed);
    }
    return result;
}
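The service above writes each chunk at FileData.FilePosition, so the caller is responsible for slicing the file and advancing the offset. A minimal client-side sketch of that pattern; the sendChunk callback stands in for the real service proxy and is a hypothetical placeholder:

using System;
using System.IO;

static class ChunkedUploader
{
    // Splits a local file into fixed-size chunks and reports the offset of each one.
    public static void Upload(string path, int chunkSize, Action<long, byte[]> sendChunk)
    {
        using (var fs = File.OpenRead(path))
        {
            var buffer = new byte[chunkSize];
            long position = 0;
            int read;
            while ((read = fs.Read(buffer, 0, buffer.Length)) > 0)
            {
                var chunk = new byte[read];
                Array.Copy(buffer, chunk, read);
                sendChunk(position, chunk); // position corresponds to FileData.FilePosition on the server side
                position += read;
            }
        }
    }
}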
public FileImpl(FileData fileData) { m_fileData = fileData; m_fileList = new ObservableCollection<IContainer>(); m_properties = new Dictionary<string, string>(); this.WasReadOnly = fileData.WasReadOnly; }
/// <summary>
/// Parameterized constructor
/// </summary>
/// <param name="remoteNetUser">The remote user with whom the file will be exchanged</param>
/// <param name="sendMessageDelegate">A method used to send messages to the MANET</param>
/// <param name="fileInformation">The file information</param>
/// <param name="fileData">The configuration parameters</param>
public FileMessageReceiver(NetUser remoteNetUser, SendMessageDelegate sendMessageDelegate, FileInformation fileInformation, FileData fileData)
    : base(remoteNetUser, sendMessageDelegate, fileInformation, fileData)
{
    thisLock = new Object();
    this.Type = FileMessageHandlerType.DOWNLOAD;
    this.downloadDir = fileData.DownloadDir;
}
public void TestDeleteFile() { FileData fd = new FileData("id", _fileName, _fileData); _fileQuery.Delete(fd); Assert.IsFalse(new FileInfo(Path.Combine(_workDirectory, _fileName)).Exists); }
public void TestAddFile() { FileData fd = new FileData("id", _fileName, _fileData); _fileQuery.Insert(fd); FileInfo fi = new FileInfo(Path.Combine(_workDirectory,_fileName)); Assert.IsTrue(fi.Exists); }
public void TestFileData() { AbstractFlight objFlight = null; IFileData handleFileData = new FileData(); System.IO.StreamReader file = new System.IO.StreamReader(Directory.GetCurrentDirectory() + "\\input1.txt"); objFlight = handleFileData.ProcessFile(file); Assert.AreEqual<double>(objFlight.TotAdjRev, 750); Assert.AreEqual<double>(objFlight.TotalCostOfFlight, 600); Assert.AreEqual<double>(objFlight.lstPassengers.Count, 6); Assert.AreEqual<decimal>(objFlight.FlightAircraft.NoOfSeats, 12); Assert.AreEqual<decimal>(objFlight.MinTakeOffLoadPercent, 75); file = new System.IO.StreamReader(Directory.GetCurrentDirectory() + "\\input.txt"); objFlight = handleFileData.ProcessFile(file); Assert.AreEqual<double>(objFlight.TotAdjRev, 1010); Assert.AreEqual<double>(objFlight.TotalCostOfFlight, 800); Assert.AreEqual<double>(objFlight.lstPassengers.Count, 8); Assert.AreEqual<decimal>(objFlight.FlightAircraft.NoOfSeats, 8); Assert.AreEqual<decimal>(objFlight.MinTakeOffLoadPercent, 75); }
/// <summary>
/// Copies the given file from "fromPath" to "toPath".
/// </summary>
/// <param name="fromPath">Relative path under the configured application "resources" directory.
/// A <see cref="System.String"/>
/// </param>
/// <param name="toPath">Relative path under the configured application "documents" directory. See GetDirectoryRoot().
/// A <see cref="System.String"/>
/// </param>
/// <returns>
/// A <see cref="System.Boolean"/>
/// </returns>
public bool CopyFromResources(string fromPath, string toPath)
{
    try
    {
        DirectoryData resourcesDir = GetDirectoryResources();
        string fromFilePath = Path.Combine(resourcesDir.FullName, fromPath);
        FileData sourceFile = new FileData(fromFilePath);
        if (ExistsFile(sourceFile))
        {
            DirectoryData rootDir = GetDirectoryRoot();
            string toFilePath = Path.Combine(rootDir.FullName, toPath);
            try
            {
                File.Copy(fromFilePath, toFilePath);
            }
            catch (Exception ex)
            {
                SystemLogger.Log(SystemLogger.Module.CORE, "Error copying from file [" + fromFilePath + "] to file [" + toFilePath + "]", ex);
                return false;
            }
            return true;
        }
        else
        {
            SystemLogger.Log(SystemLogger.Module.CORE, "Error copying from file [" + fromFilePath + "]. File does not exist.");
        }
    }
    catch (Exception)
    {
        SystemLogger.Log(SystemLogger.Module.CORE, "Error copying from file [" + fromPath + "]. Unhandled exception.");
    }
    return false;
}
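File.Copy as used above throws if the destination file already exists. When overwriting is acceptable, the BCL overload below handles it and also creates the destination folder first; the paths are placeholders for illustration:

using System.IO;

class SafeCopy
{
    static void Main()
    {
        string from = @"resources\logo.png"; // placeholder source path
        string to = @"documents\logo.png";   // placeholder destination path

        // CreateDirectory is a no-op if the folder already exists.
        Directory.CreateDirectory(Path.GetDirectoryName(Path.GetFullPath(to)));

        // The third argument allows replacing an existing file.
        File.Copy(from, to, overwrite: true);
    }
}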
public void TestDeleteFileNotExist() { string notExistingFileName = "somefilethatdoesnotexist.txt"; FileData fd = new FileData("id", notExistingFileName, _fileData); _fileQuery.Delete(fd); Assert.IsFalse(new FileInfo(Path.Combine(_workDirectory, notExistingFileName)).Exists); }
public override async Task<bool> DeleteFile(FileData file) { var path = GetFilePath(file); var fileToDelete = await GetStorageItem(path, false); if (fileToDelete == null) return false; await fileToDelete.DeleteAsync(StorageDeleteOption.PermanentDelete); return true; }
/// <summary>
/// Parameterized constructor
/// </summary>
/// <param name="remoteNetUser">The user with whom the file is exchanged</param>
/// <param name="remoteFileHandlerId">The id of the file transfer</param>
/// <param name="sendMessageDelegate">A function through which a message can be sent</param>
/// <param name="fileInformation">The file information</param>
/// <param name="fileData">File configuration data</param>
public FileMessageSender(NetUser remoteNetUser, Guid remoteFileHandlerId, SendMessageDelegate sendMessageDelegate, FileInformation fileInformation, FileData fileData)
    : base(remoteNetUser, sendMessageDelegate, fileInformation, fileData)
{
    FileName = fileInformation.Path;
    currentPart = 0;
    this.Type = FileMessageHandlerType.UPLOAD;
    this.Id = remoteFileHandlerId;
}
public static ResourceFile UploadResourceFile(FileData files, int resourceId) { ResourceFile resFileRet = new ResourceFile(); //we get the information using (ResourcesDataContext dc = new ResourcesDataContext()) { //check if we have the file type int typeid = 0; var type = (from d in dc.bhdFileTypes where d.contentType == files.fileType && d.isActive select d).FirstOrDefault(); if (type == null) { bhdFileType ft = new bhdFileType(); ft.contentType = files.fileType; string extension = files.fileName; int pos = extension.LastIndexOf('.'); ft.extension = extension.Substring(pos + 1, extension.Length - (pos + 1)); ft.isActive = true; dc.bhdFileTypes.InsertOnSubmit(ft); dc.SubmitChanges(); typeid = ft.id; } else { typeid = type.id; } bhdFile f = new bhdFile(); f.size = files.fileSize; f.name = files.fileName; f.isActive = true; f.fileTypeId = typeid; dc.bhdFiles.InsertOnSubmit(f); dc.SubmitChanges(); bhdFileData fd = new bhdFileData(); fd.fileId = f.id; fd.data = files.fileData; dc.bhdFileDatas.InsertOnSubmit(fd); dc.SubmitChanges(); bhdResourceFile rf = new bhdResourceFile(); rf.resourceId = resourceId; rf.fileId = f.id; dc.bhdResourceFiles.InsertOnSubmit(rf); dc.SubmitChanges(); ResourceFile tmpFileRes = new ResourceFile(); tmpFileRes.fileid = rf.fileId; tmpFileRes.filename = f.name; resFileRet = tmpFileRes; } return resFileRet; }
public void AddFile(string fileName) { FileInfo fileInfo = new FileInfo(fileName); FileData fileData = new FileData(fileInfo); if (testCaseFiles == null) testCaseFiles = new SortedSetAny<FileData>(); testCaseFiles.Add(fileData); fileData.SetFileBytes(); }
public void OpenItem(FileData data) { if (this.IsUpdating || data?.FullPath == null) { return; } Factory.Resolve<OpenFileCommand>().Execute(data.FullPath); }
static void Main(string[] args) { string filename = args[0]; int files = 5; int subpacks = 2; PackFile packData = new PackFile(); using(FileStream s = File.Open(filename, FileMode.Open, FileAccess.Read)) using(BinaryReader reader = new BEBinaryReader(s)) { while(true) { packData.header = reader.ReadInt32(); if (packData.header == 0) break; packData.filesInPack = reader.ReadInt32(); //packData.header.i3 = reader.ReadInt32(); Console.WriteLine("Pack Header"); Console.WriteLine("First Int: " + (uint)packData.header); Console.WriteLine("Second Int: " + (uint)packData.filesInPack); files = packData.filesInPack; for (int i = 0; i < files; i++) { Console.WriteLine("File " + i); string readName = reader.ReadString(); packData.filenames.Add(readName); Console.WriteLine("Name: " + packData.filenames[i]); FileData fileDat = new FileData(); fileDat.offset = reader.ReadInt32(); fileDat.size = reader.ReadInt32(); fileDat.i3 = reader.ReadInt32(); packData.fileData.Add(fileDat); Console.WriteLine("Data: ( " + packData.fileData[i].offset + " , " + packData.fileData[i].size + ", " + (uint)packData.fileData[i].i3 + " )"); } s.Seek(packData.header, SeekOrigin.Begin); } for (int i = 0; i < packData.fileData.Count; i++) { using (FileStream outfile = File.Open(packData.filenames[i], FileMode.Create, FileAccess.Write)) { s.Seek(packData.fileData[i].offset, SeekOrigin.Begin); byte[] file = reader.ReadBytes(packData.fileData[i].size); outfile.Write(file, 0, file.Length); } } } }
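BEBinaryReader in the extractor above is project-specific. A rough BCL-only way to get the same big-endian Int32 reads, assuming a runtime that ships System.Buffers.Binary (.NET Core 2.1 or later); this is a sketch, not the BEBinaryReader implementation:

using System.Buffers.Binary;
using System.IO;

static class BigEndian
{
    // Reads exactly four bytes from the stream and interprets them as a big-endian Int32.
    public static int ReadInt32BE(Stream s)
    {
        var buf = new byte[4];
        int total = 0;
        while (total < 4)
        {
            int n = s.Read(buf, total, 4 - total);
            if (n == 0) throw new EndOfStreamException();
            total += n;
        }
        return BinaryPrimitives.ReadInt32BigEndian(buf);
    }
}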
public CompressedContainerImpl(FileData fileData, bool expanded, ICompressor compressor, IDecompressor decompressor) : base(fileData) { m_decompressor = decompressor; m_compressor = compressor; m_fileData.FileType = Workshare.Policy.FileType.ZIP; m_expanded = expanded; // We want to know when someone adds to the collection so that we can mark it as expanded. this.Files.CollectionChanged += new System.Collections.Specialized.NotifyCollectionChangedEventHandler(FilesCollectionChanged); }
public void DoesNotContainANumber_WhenStringDoesContainANumber_WritesTraceMessage() { var listener = new TestListener(); Trace.Listeners.Add(listener); var data = new FileData { StringValue = "1" }; Assert.Throws<ArgumentException>(() => data.DoesNotContainANumber()); Assert.AreEqual("Test:DoesNotContainANumberFailed:DoesNotContainANumber", listener.Output); Trace.Listeners.Remove(listener); }
public void setMetatdata(FileData f, string metadata) { foreach (ContentManager.EntryElement ee in e.CurrentRoot.Children) { if (f.FileName.Equals(ee.File.FileName)) { e.addMetadata(ee.File, metadata); break; } } }
public void IsNotNull_WhenValueIsNull_WritesTraceMessage() { var listener = new TestListener(); Trace.Listeners.Add(listener); var data = new FileData { StringValue = null }; Assert.Throws<ArgumentException>(() => data.IsNotNull()); Assert.AreEqual("Test:IsNotNullFailed:IsNotNull", listener.Output); Trace.Listeners.Remove(listener); }
public SelectionImpl(FileData fileData) : base(fileData) { if (null == m_fileData.Displayname) m_fileData.Displayname = "Selection"; if (null == m_fileData.Filename) m_fileData.Filename = "Selection"; m_fileData.FileType = Workshare.Policy.FileType.Selection; }
public void WriteFile(string virtualPath, System.IO.Stream inputStream)
{
    // Copy the whole stream rather than relying on a single Read call capped at 10,000 bytes,
    // which could silently truncate larger inputs or partial reads.
    using (var ms = new System.IO.MemoryStream())
    {
        inputStream.CopyTo(ms);
        contents[virtualPath] = ms.ToArray();
    }
    files[virtualPath] = new FileData { VirtualPath = virtualPath };
}
public void WriteFile(string virtualPath, System.IO.Stream inputStream, DateTime? lastWriteTime = null)
{
    // Same full-copy approach as above so the buffered content always matches the input stream.
    using (var ms = new System.IO.MemoryStream())
    {
        inputStream.CopyTo(ms);
        contents[virtualPath] = ms.ToArray();
    }
    files[virtualPath] = new FileData { VirtualPath = virtualPath, Updated = lastWriteTime ?? DateTime.UtcNow };
}
/// <summary>
/// Method used to extract the content of a specific file.
/// </summary>
/// <param name="arq">The file to be indexed.</param>
public void Indexe(Arquivo arq)
{
    try
    {
        IGoldenIndex goldenIndex = GoldenIndexClient.Instance(Settings.Default.MaquinaGoldenIndex, Settings.Default.PortaGoldenIndex, Settings.Default.UriGoldenIndex, Settings.Default.ProtocoloGoldenIndex);
        User usuarioGoldenIndex = GoldenIndexClient.Authenticate(Settings.Default.UsuarioGoldenIndex, Settings.Default.SenhaGoldenIndex, goldenIndex);

        //string extensao = versao.Extensao.StartsWith(".") ? versao.Extensao : string.Concat(".", versao.Extensao);
        //if (!goldenIndex.IsSupported(usuarioGoldenIndex, extensao))
        //{
        //    // If the file type is not supported, do nothing
        //    return;
        //}

        string id = obterIdCadastrado();
        //List<Arquivo> lista = new List<Arquivo>();
        //lista = obterIdArquivos(id);
        //foreach (var arq in lista)
        //{
        FileData arquivo = new FileData();
        //arquivo.Id = count++;
        arquivo.Url = diretorio + arq.nome_Arquivo;

        //TEST
        CollectionFieldUpdatingParameters parameters = new CollectionFieldUpdatingParameters();
        parameters.CollectionName = "arquivos";
        parameters.ContentField = "conteudo_Arquivo"; // same as the collection in this case, because the multivalued field is not grouped
        parameters.Table = "documento";
        //parameters.ParentField = "id";
        //parameters.ParentFieldValue = id; // here we assume the Pessoas base has a record whose Id is 12; the new row is added to its "Documentos" collection
        parameters.IdField = "id_arquivo";
        parameters.IdFieldValue = arq.id_Arquivo.ToString();
        arquivo.IndexerParameters = parameters;

        //arquivo.Id = Convert.ToUInt32(arq.id_Arquivo);
        goldenIndex.SaveFile(usuarioGoldenIndex, arquivo);
        // }
    }
    catch (Exception exception)
    {
        string erro = exception.Message;
    }
}
private MediaFile LoadMediaFile(FileData fileData) { return new MediaFile( _idGenerator.GetNextId(), fileData.Name, fileData.Path, fileData.LastModified, fileData.Size, GetDuration(fileData.Path), MediaFileType.All.FindByExtension(fileData.Extension), new List<Tag>()); }
public void addMetadata(string Key, FileData Value) { if (MetadataTable.Contains(Key)) { ((MetadataElement)MetadataTable[Key]).File.Add(Value); } else { MetadataElement newArray = new MetadataElement(); newArray.File.Add(Value); MetadataTable.Add(Key, newArray); } }
public static FileData UnwrapCSVFile(string fileName) { string[][] fileData = ReadFile(fileName); List<double[]> inputs = new List<double[]>(); List<double[]> outputs = new List<double[]>(); List<double[]> inputSet = new List<double[]>(); for(int line = 0; line < fileData.Length; line++) { if(fileData[line].Length > outputFields[0]) { //this is an input-output learning pair List<string> buffer_inputs = new List<string>(); List<string> buffer_outputs = new List<string>(); for( int field = 0; field < fileData[line].Length; field++) { if(outputFields.Contains(field)) { buffer_outputs.Add(fileData[line][field]); } else { buffer_inputs.Add(fileData[line][field]); } } double[] buffer2_inputs = BuildDataSet(buffer_inputs); double[] buffer2_outputs = BuildDataSet(buffer_outputs); inputs.Add(buffer2_inputs); outputs.Add(buffer2_outputs); } else { //then it's just an input set inputSet.Add(BuildDataSet(fileData[line])); } } IOBatch i = new IOBatch(); i.InputList = inputs; i.OutputList = outputs; FileData returnData = new FileData(); returnData.LearningData = i; returnData.InputSet = inputSet; return returnData; }
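ReadFile and BuildDataSet are referenced above but not shown. A hypothetical sketch of what they might do, assuming plain comma-separated numeric fields with no quoting or escaping; the real helpers may differ:

using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;

static class CsvHelpers
{
    // Reads every non-empty line and splits it on commas.
    public static string[][] ReadFile(string fileName) =>
        File.ReadAllLines(fileName)
            .Where(line => line.Length > 0)
            .Select(line => line.Split(','))
            .ToArray();

    // Parses a row of fields into the numeric vector used for training.
    public static double[] BuildDataSet(IEnumerable<string> fields) =>
        fields.Select(f => double.Parse(f, CultureInfo.InvariantCulture)).ToArray();
}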
public static FileData Eeg3_OpenFile(String path) { TCoh3 metaInfo = ReadMetaInfo(path); int nbValues = metaInfo.duration * metaInfo.frequency; short[,] eegMatrice = ReadEEG(nbValues, metaInfo.electrodes); //eegMatrice = null; FileNext(); FileData fileData = new FileData { metaData = metaInfo, eegMatrice = eegMatrice, path=path }; return fileData; }
private void AddClassData(Compound _compound, TemplateInfo _info, ClassInfo _classInfo, FileData _outputFile)
{
    List<CompoundFunction> functions = new List<CompoundFunction>();
    List<Pair<CompoundFunction, CompoundFunction>> properties = new List<Pair<CompoundFunction, CompoundFunction>>();
    List<CompoundVariable> variables = new List<CompoundVariable>();

    foreach (Compound child in _compound)
    {
        if (child is CompoundFunction)
        {
            CompoundFunction func = (CompoundFunction)child;
            if (GetAviableFunc(func))
            {
                if ( //func.Const &&
                    func.CompoundParamTypes.Count == 0
                    && func.CompoundType.TypeName != "void"
                    && (func.GetProperty || func.IsProperty))
                    properties.Add(new Pair<CompoundFunction, CompoundFunction>(func, null));
                else
                    functions.Add(func);
            }
        }
        else if (child is CompoundVariable)
        {
            CompoundVariable variable = (CompoundVariable)child;
            if (variable.Public
                && !variable.Static
                && !variable.Name.StartsWith("_")
                && CompoundUtility.IsVariableEvent(variable))
            {
                variables.Add(variable);
            }
        }
    }

    // pull the property setters out of the function list
    foreach (var func in properties)
        func.Second = PopSetterFunc(func.First, functions);

    foreach (var func in properties)
        AddClassProperty(func, _info, _classInfo, _outputFile);

    foreach (var func in functions)
        AddClassFunction(func, _info, _classInfo, _outputFile);

    foreach (var variable in variables)
        AddClassEvent(variable, _info, _classInfo, _outputFile);
}
/// <summary> /// Initializes a new instance of the CompareFileData class and sets properties based on the FileData provided /// </summary> /// <param name="leftFile">First set of FileData to use in comparison</param> /// <param name="rightFile">Second set of FileData to use in comparison</param> public CompareFileData(FileData leftFile, FileData rightFile) { if (leftFile == null) { throw new ArgumentNullException("leftFile"); } if (rightFile == null) { throw new ArgumentNullException("rightFile"); } this.LeftFileData = leftFile; this.RightFileData = rightFile; this.FileCompareText = string.Format(Strings.CompareFilesHeader, Path.GetFileName(leftFile.FileName), Path.GetFileName(rightFile.FileName)); bool samePublicMetadata = true; bool samePersonalMetadata = true; this.PublicMetadata = CompareFileData.CompareMetadata(leftFile.PublicMetadata, rightFile.PublicMetadata, out samePublicMetadata); this.PersonalMetadata = CompareFileData.CompareMetadata(leftFile.PersonalMetadata, rightFile.PersonalMetadata, out samePersonalMetadata); this.Streams = CompareFileData.CompareStreams(leftFile.Streams, rightFile.Streams); if (leftFile.PublicMetadata.Count == 0 && rightFile.PublicMetadata.Count == 0) { this.PublicMetadataCompareText = string.Format(Strings.PublicMetadataHeader, Strings.None); } else { this.PublicMetadataCompareText = string.Format(Strings.PublicMetadataHeader, samePublicMetadata ? Strings.Same : Strings.Different); } if (leftFile.PersonalMetadata.Count == 0 && rightFile.PersonalMetadata.Count == 0) { this.PersonalMetadataCompareText = string.Format(Strings.PersonalMetadataHeader, Strings.None); } else { this.PersonalMetadataCompareText = string.Format(Strings.PersonalMetadataHeader, samePersonalMetadata ? Strings.Same : Strings.Different); } if (leftFile.Streams.Count == 0 && rightFile.Streams.Count == 0) { this.StreamsCompareText = string.Format(Strings.StreamsHeader, Strings.None); } else { bool sameStreamData = this.Streams.All(data => data.Same); this.StreamsCompareText = string.Format(Strings.StreamsHeader, sameStreamData ? Strings.Same : Strings.Different); } }
public void CreateDAL(bool build)
{
    string[] fileNames;
    // get the files to generate
    if (genConfig != null)
    {
        if (genConfig.bllFiles.Count > 0)
        {
            fileNames = new string[genConfig.bllFiles.Count];
            for (int i = 0; i < fileNames.Length; i++)
            {
                fileNames[i] = Frame.MapPath(genConfig.bllFiles[i]);
            }
        }
        else
        {
            return;
        }
    }
    else
    {
        if (!Directory.Exists(Frame.MapPath(config.BLL)))
        {
            return;
        }
        // get all of the BLL files
        fileNames = Directory.GetFiles(Frame.MapPath(config.BLL), "*.cs", SearchOption.AllDirectories);
        if (fileNames == null || fileNames.Length < 1)
        {
            return;
        }
    }

    // generated code
    string compileCode = "";
    // class name
    string ClassName = "";
    int relative_position = 0;

    if (fileNames.Length > 0)
    {
        Compiler com = new Compiler();
        FileData fileData = null;
        ClassData classData = null;
        MethodData methodData = null;
        StreamWriter sw = null;
        string compileFileName = "";
        string compileDir = "";
        string methodContent = "";

        for (int i = 0; i < fileNames.Length; i++)
        {
            relative_position = 0;
            fileData = com.GetFileData(Frame.appRoot, this.config.APP, fileNames[i], System.Text.Encoding.UTF8);
            ClassName = Path.GetFileNameWithoutExtension(fileNames[i]);
            classData = fileData.GetClassData(ClassName);
            compileCode = fileData.csharpCode.ToString();
            // relative path
            compileDir = config.ChangeBLLName(config.DAL.Trim('/'), fileData.nameSpace.Replace('.', '/'));
            // absolute path
            compileFileName = Frame.MapPath(compileDir + "\\" + ClassName + ".cs");
            // change the namespace
            relative_position += Replace(ref compileCode, relative_position + fileData.start, fileData.length, "namespace " + (Frame.AssemblyTitle + "/" + compileDir.TrimEnd('/')).Replace('/', '.') + "\r\n{");

            if (classData.MethodDataList.Count > 0)
            {
                for (int j = 0; j < classData.MethodDataList.Count; j++)
                {
                    methodData = classData.MethodDataList[j];
                    methodContent = methodData.Content;
                    SqlCompiler sqlCompiler = new NFinal.Compile.SqlCompiler();
                    // parse the database-related functions out of the code
                    List<DbFunctionData> dbFunctions = sqlCompiler.Compile(com.DeleteComment(methodContent));
                    if (dbFunctions.Count > 0)
                    {
                        SqlAnalyse analyse = new SqlAnalyse();
                        // using the database, extract all table and column information from the SQL statements
                        methodData.dbFunctions = analyse.FillFunctionDataList(NFinal.DB.Coding.DB.DbStore, dbFunctions);
                    }

                    // replace the database functions
                    int content_relative_position = 0;
                    if (dbFunctions.Count > 0)
                    {
                        bool hasSameVarName = false;
                        List<string> varNames = new List<string>();
                        // add struct types
                        for (int s = 0; s < dbFunctions.Count; s++)
                        {
                            // remove duplicates
                            if (varNames.Count > 0)
                            {
                                hasSameVarName = false;
                                for (int c = 0; c < varNames.Count; c++)
                                {
                                    // if a duplicate is found, skip this iteration
                                    if (varNames[c] == dbFunctions[s].varName)
                                    {
                                        hasSameVarName = true;
                                        break;
                                    }
                                }
                                if (hasSameVarName)
                                {
                                    continue;
                                }
                            }
                            varNames.Add(dbFunctions[s].varName);
                            // determine whether the List<dynamic> and dynamic types returned by the SQL had related types added via AddNewField(string fileName, Type t);
                            NewField newField = new NewField(dbFunctions[s].varName);
                            List<NFinal.Compile.StructField> structFieldList = newField.GetFields(ref methodContent, methodData.name);
                            // add the struct fields
                            string StructData = sqlCompiler.SetMagicStruct(methodData.name, dbFunctions[s], structFieldList, Frame.appRoot);
                            if (!string.IsNullOrEmpty(StructData))
                            {
                                compileCode = compileCode.Insert(methodData.start + relative_position, StructData);
                                relative_position += StructData.Length;
                            }
                        }

                        // fix up the method's return type to improve execution efficiency
                        if (methodData.returnType.IndexOf("dynamic") > -1)
                        {
                            string returnTypeString = "";
                            if (new System.Text.RegularExpressions.Regex(@"List\s*<\s*dynamic\s*>").IsMatch(methodData.returnType))
                            {
                                returnTypeString = string.Format("NFinal.DB.NList<__{0}_{1}__>", methodData.name, methodData.returnVarName);
                            }
                            else
                            {
                                returnTypeString = string.Format("__{0}_{1}__", methodData.name, methodData.returnVarName);
                            }
                            relative_position += Replace(ref compileCode, methodData.returnTypeIndex + relative_position + classData.position, methodData.returnType.Length, returnTypeString);
                        }

                        // swap in the database operation functions inside the method
                        content_relative_position += sqlCompiler.SetMagicFunction(methodData.name, ref methodContent, content_relative_position, methodData.dbFunctions, Frame.appRoot);
                        // parse and replace the connection strings
                        content_relative_position += sqlCompiler.SetMagicConnection(methodData.name, ref methodContent, Frame.appRoot);
                    }

                    if (build)
                    {
                        relative_position += Replace(ref compileCode, relative_position + methodData.position, methodData.Content.Length, methodContent);
                    }
                    else
                    {
                        if (methodData.returnType == "void")
                        {
                            relative_position += Replace(ref compileCode, relative_position + methodData.position, methodData.Content.Length, string.Empty);
                        }
                        else
                        {
                            relative_position += Replace(ref compileCode, relative_position + methodData.position, methodData.Content.Length, "return null;");
                        }
                    }
                }
            }

            // create the folder if it does not exist
            if (!Directory.Exists(Frame.MapPath(compileDir)))
            {
                Directory.CreateDirectory(Frame.MapPath(compileDir));
            }
            // write the DAL-layer class file
            sw = new StreamWriter(compileFileName, false, System.Text.Encoding.UTF8);
            sw.Write(compileCode);
            sw.Close();
        }
    }
}
internal UploadcareFile(Client client, FileData fileData) { _client = client; _fileData = fileData; }
/// <summary>
/// Writes the header part to an output stream.
/// </summary>
/// <param name="stream">The stream to write to.</param>
/// <param name="offset">The absolute offset in the stream to write at.</param>
/// <param name="part5">The data to write.</param>
/// <param name="p">Progress info.</param>
/// <returns>An async task.</returns>
internal async Task WriteHeaderPart5Async(Stream stream, UInt64 offset, NefsHeaderPart5 part5, NefsProgress p)
{
    await FileData.WriteDataAsync(stream, offset, part5, p);
}
private void toolStripSaveRace_Click(object sender, EventArgs e) { FileData.SaveFile(@"C:\Users\Public\Documents\data.txt", runRace.RunnerList); }
private void ViewDocumentAsImage(ViewDocumentParameters request, ViewDocumentResponse result, string fileName) { var docInfo = _imageHandler.GetDocumentInfo(request.Path); var maxWidth = 0; var maxHeight = 0; foreach (var pageData in docInfo.Pages) { if (pageData.Height > maxHeight) { maxHeight = pageData.Height; maxWidth = pageData.Width; } } var fileData = new FileData { DateCreated = DateTime.Now, DateModified = docInfo.LastModificationDate, PageCount = docInfo.Pages.Count, Pages = docInfo.Pages, MaxWidth = maxWidth, MaxHeight = maxHeight }; DocumentInfoContainer documentInfoContainer = _imageHandler.GetDocumentInfo(request.Path); int[] pageNumbers = new int[documentInfoContainer.Pages.Count]; for (int i = 0; i < documentInfoContainer.Pages.Count; i++) { pageNumbers[i] = documentInfoContainer.Pages[i].Number; } string applicationHost = GetApplicationHost(); var documentUrls = ImageUrlHelper.GetImageUrls(applicationHost, pageNumbers, request); string[] attachmentUrls = new string[0]; foreach (AttachmentBase attachment in docInfo.Attachments) { List <PageImage> pages = _imageHandler.GetPages(attachment); var attachmentInfo = _imageHandler.GetDocumentInfo(_tempPath + "\\" + Path.GetFileNameWithoutExtension(docInfo.Guid) + Path.GetExtension(docInfo.Guid).Replace(".", "_") + "\\attachments\\" + attachment.Name); fileData.PageCount += pages.Count; fileData.Pages.AddRange(attachmentInfo.Pages); ViewDocumentParameters attachmentResponse = request; attachmentResponse.Path = attachmentInfo.Guid; int[] attachmentPageNumbers = new int[pages.Count]; for (int i = 0; i < pages.Count; i++) { attachmentPageNumbers[i] = pages[i].PageNumber; } Array.Resize <string>(ref attachmentUrls, (attachmentUrls.Length + pages.Count)); string[] attachmentImagesUrls = new string[pages.Count]; attachmentImagesUrls = ImageUrlHelper.GetImageUrls(applicationHost, attachmentPageNumbers, attachmentResponse); attachmentImagesUrls.CopyTo(attachmentUrls, (attachmentUrls.Length - pages.Count)); } SerializationOptions serializationOptions = new SerializationOptions { UsePdf = request.UsePdf, SupportListOfBookmarks = request.SupportListOfBookmarks, SupportListOfContentControls = request.SupportListOfContentControls }; var documentInfoJson = new DocumentInfoJsonSerializer(docInfo, serializationOptions).Serialize(); result.documentDescription = documentInfoJson; result.docType = docInfo.DocumentType; result.fileType = docInfo.FileType; if (docInfo.Attachments.Count > 0) { var imagesUrls = new string[attachmentUrls.Length + documentUrls.Length]; documentUrls.CopyTo(imagesUrls, 0); attachmentUrls.CopyTo(imagesUrls, documentUrls.Length); result.imageUrls = imagesUrls; } else { result.imageUrls = documentUrls; } }
/// <summary> /// Creates the template and file data. /// </summary> /// <param name="fileName">Name of the file.</param> /// <param name="filePath">The file path.</param> /// <param name="packageName">Name of the package.</param> /// <param name="fileMonitorDataManager">The file monitor data manager.</param> /// <param name="fileData">The file data.</param> private void CreateTemplateAndFileData(string fileName, string filePath, string packageName, FileMonitorDataManager fileMonitorDataManager, FileData fileData) { var absolutePath = FrontendManager.VirtualPathBuilder.MapPath(filePath); if (!this.IsFileInValidFolder(absolutePath, packageName)) { return; } var extension = fileName.Split('.').LastOrDefault(); var fileNameWithoutExtension = fileName.Substring(0, fileName.Length - (extension.Length + 1)); var viewFileExtensions = this.GetViewExtensions(); if (viewFileExtensions.Contains(extension, StringComparer.Ordinal)) { string templateTitle = string.Empty; if (string.IsNullOrEmpty(packageName)) { templateTitle = fileNameWithoutExtension; } else { templateTitle = packageName + "." + fileNameWithoutExtension; } if (fileData == null) { fileData = fileMonitorDataManager.CreateFileData(); } fileData.FilePath = filePath; fileData.FileName = fileName; fileData.PackageName = packageName; fileMonitorDataManager.SaveChanges(); this.CreateTemplate(templateTitle); } }
private static void AssertFileData(FileData fileData1, FileType fileType1, string fileContents1, FileData fileData2, FileType fileType2, string fileContents2) { if (fileType1 == fileType2) { Assert.AreEqual(fileData1.FileType, fileData2.FileType); Assert.AreEqual(fileData1.FileType, fileType1); Assert.AreEqual(fileData1.FileType, fileType2); } else { Assert.AreNotEqual(fileData1.FileType, fileData2.FileType); Assert.AreEqual(fileData1.FileType, fileType1); Assert.AreEqual(fileData2.FileType, fileType2); } if (fileContents1 == fileContents2) { Assert.AreEqual(fileData1.ContentLength, fileData2.ContentLength); Assert.AreEqual(fileData1.ContentLength, fileContents1.Length); Assert.AreEqual(fileData1.ContentLength, fileContents2.Length); AssertAreEqual(fileData1.ContentHash, fileData2.ContentHash); } else { Assert.AreEqual(fileData1.ContentLength, fileContents1.Length); Assert.AreEqual(fileData1.ContentLength, fileContents2.Length); AssertAreNotEqual(fileData1.ContentHash, fileData2.ContentHash); } }
protected UrlData newMediaObjectLogic( string blogid, string username, string password, FileData file) { if (validateUser(username, password)) { User u = new User(username); Channel userChannel = new Channel(username); UrlData fileUrl = new UrlData(); if (userChannel.ImageSupport) { Media rootNode; if (userChannel.MediaFolder > 0) { rootNode = new Media(userChannel.MediaFolder); } else { rootNode = new Media(u.StartMediaId); } // Create new media Media m = Media.MakeNew(file.name, MediaType.GetByAlias(userChannel.MediaTypeAlias), u, rootNode.Id); Property fileObject = m.getProperty(userChannel.MediaTypeFileProperty); var filename = file.name.Replace("/", "_"); var relativeFilePath = _fs.GetRelativePath(fileObject.Id, filename); fileObject.Value = _fs.GetUrl(relativeFilePath); fileUrl.url = fileObject.Value.ToString(); if (!fileUrl.url.StartsWith("http")) { var protocol = GlobalSettings.UseSSL ? "https" : "http"; fileUrl.url = protocol + "://" + HttpContext.Current.Request.ServerVariables["SERVER_NAME"] + fileUrl.url; } _fs.AddFile(relativeFilePath, new MemoryStream(file.bits)); // Try updating standard file values try { string orgExt = ""; // Size if (m.getProperty("umbracoBytes") != null) { m.getProperty("umbracoBytes").Value = file.bits.Length; } // Extension if (m.getProperty("umbracoExtension") != null) { orgExt = ((string) file.name.Substring(file.name.LastIndexOf(".") + 1, file.name.Length - file.name.LastIndexOf(".") - 1)); m.getProperty("umbracoExtension").Value = orgExt.ToLower(); } // Width and Height // Check if image and then get sizes, make thumb and update database if (m.getProperty("umbracoWidth") != null && m.getProperty("umbracoHeight") != null && ",jpeg,jpg,gif,bmp,png,tiff,tif,".IndexOf("," + orgExt.ToLower() + ",") > 0) { int fileWidth; int fileHeight; var stream = _fs.OpenFile(relativeFilePath); Image image = Image.FromStream(stream); fileWidth = image.Width; fileHeight = image.Height; stream.Close(); try { m.getProperty("umbracoWidth").Value = fileWidth.ToString(); m.getProperty("umbracoHeight").Value = fileHeight.ToString(); } catch { } } } catch { } return(fileUrl); } else { throw new ArgumentException( "Image Support is turned off in this channel. Modify channel settings in umbraco to enable image support."); } } return(new UrlData()); }
/// <summary>
/// Function to open a stream to a file on the physical file system from the <see cref="IGorgonVirtualFile"/> passed in.
/// </summary>
/// <param name="file">The <see cref="IGorgonVirtualFile"/> that will be used to locate the file that will be opened on the physical file system.</param>
/// <returns>A <see cref="Stream"/> to the file, or <b>null</b> if the file does not exist.</returns>
/// <exception cref="ArgumentNullException">Thrown when the <paramref name="file"/> parameter is <b>null</b>.</exception>
/// <remarks>
/// <para>
/// This will take the <see cref="IGorgonVirtualFile"/> and open its corresponding physical file location as a stream for reading. The stream that is returned will be opened, and as such, it is the
/// responsibility of the user to close the stream when finished.
/// </para>
/// </remarks>
protected virtual Stream OnOpenFileStream(IGorgonVirtualFile file) => FileData.OpenReadStream(file.FullPath);
static void Main(string[] args) { #region DI IServiceProvider provider = GetProvider(); SampleConfig sampleConfig = provider.GetRequiredService <SampleConfig>(); #endregion #region Upload and Download File MongoFileStorage fileStorage = new MongoFileStorage(sampleConfig.StorageDB); // Get file name and bytes string fileFullName = new FileInfo(sampleConfig.UploadFilePath).Name; byte[] fileBytes = File.ReadAllBytes(sampleConfig.UploadFilePath); string id = fileStorage.Upload(fileFullName, fileBytes); Console.WriteLine($"Upload {fileFullName} => the file id :{id}"); Console.WriteLine($"=============================================================================================="); FileData file = fileStorage.Download(id); Console.WriteLine($"Download {file.Id} => the file name is {file.Name}"); Console.WriteLine($"=============================================================================================="); bool isDel = fileStorage.Delete(file.Id); Console.WriteLine($"Delete { file.Id} => {isDel}"); Console.WriteLine($"=============================================================================================="); #endregion #region CRUD MongoStorage storage = new MongoStorage(sampleConfig.StorageDB); try { storage.StartTransaction(); storage.Insert <SampleConfig>(sampleConfig, tableName: "test"); Console.WriteLine($"Insert => {JsonConvert.SerializeObject(sampleConfig)}"); Console.WriteLine($"=============================================================================================="); List <SampleConfig> dataSet = storage.Query <SampleConfig>(o => o.UploadFilePath.Length > 0, tableName: "test"); Console.WriteLine($"Query => {JsonConvert.SerializeObject(dataSet)}"); Console.WriteLine($"=============================================================================================="); storage.Update <SampleConfig>(o => o.UploadFilePath.Length > 0, Builders <SampleConfig> .Update.Set(o => o.UploadFilePath, "Test Path"), tableName: "test"); Console.WriteLine($"Update => UploadFilePath = Test Path"); Console.WriteLine($"=============================================================================================="); dataSet = storage.Query <SampleConfig>(o => o.UploadFilePath.Length > 0, tableName: "test"); Console.WriteLine($"Query => {JsonConvert.SerializeObject(dataSet)}"); Console.WriteLine($"=============================================================================================="); storage.Replace <SampleConfig>(o => o.UploadFilePath.Length > 0, sampleConfig, tableName: "test"); Console.WriteLine($"Replace => {JsonConvert.SerializeObject(sampleConfig)}"); Console.WriteLine($"=============================================================================================="); dataSet = storage.Query <SampleConfig>(o => o.UploadFilePath.Length > 0, tableName: "test"); Console.WriteLine($"Query => {JsonConvert.SerializeObject(dataSet)}"); Console.WriteLine($"=============================================================================================="); storage.Delete <SampleConfig>(o => o.UploadFilePath.Length > 0, tableName: "test"); Console.WriteLine($"Query => {JsonConvert.SerializeObject(dataSet)}"); Console.WriteLine($"=============================================================================================="); dataSet = storage.Query <SampleConfig>(o => o.UploadFilePath.Length > 0, tableName: "test"); Console.WriteLine($"Query => {JsonConvert.SerializeObject(dataSet)}"); 
Console.WriteLine($"=============================================================================================="); long count = storage.Count <SampleConfig>(o => o.UploadFilePath.Length > 0, tableName: "test"); Console.WriteLine($"Count => {count}"); Console.WriteLine($"=============================================================================================="); storage.CommitTransaction(); } catch (Exception ex) { storage.AbortTransaction(); throw; } #endregion Console.ReadKey(); }
/// <param name="useSpansForWriting">Tests the Span overloads of Write</param> /// <param name="writeInChunks">Writes in chunks of 5 to test Write with a nonzero offset</param> public static async Task CreateFromDir(string directory, Stream archiveStream, ZipArchiveMode mode, bool useSpansForWriting = false, bool writeInChunks = false) { var files = FileData.InPath(directory); using (ZipArchive archive = new ZipArchive(archiveStream, mode, true)) { foreach (var i in files) { if (i.IsFolder) { string entryName = i.FullName; ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/') + "/"); e.LastWriteTime = i.LastModifiedDate; } } foreach (var i in files) { if (i.IsFile) { string entryName = i.FullName; var installStream = await StreamHelpers.CreateTempCopyStream(Path.Combine(i.OrigFolder, i.FullName)); if (installStream != null) { ZipArchiveEntry e = archive.CreateEntry(entryName.Replace('\\', '/')); e.LastWriteTime = i.LastModifiedDate; using (Stream entryStream = e.Open()) { int bytesRead; var buffer = new byte[1024]; if (useSpansForWriting) { while ((bytesRead = installStream.Read(new Span <byte>(buffer))) != 0) { entryStream.Write(new ReadOnlySpan <byte>(buffer, 0, bytesRead)); } } else if (writeInChunks) { while ((bytesRead = installStream.Read(buffer, 0, buffer.Length)) != 0) { for (int k = 0; k < bytesRead; k += 5) { entryStream.Write(buffer, k, Math.Min(5, bytesRead - k)); } } } else { while ((bytesRead = installStream.Read(buffer, 0, buffer.Length)) != 0) { entryStream.Write(buffer, 0, bytesRead); } } } } } } } }
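The helper above walks the directory itself so the test can control entry names and write patterns. For the common case outside of tests, the roughly equivalent one-call BCL API is ZipFile.CreateFromDirectory; the paths below are placeholders:

using System.IO.Compression;

class ZipQuickStart
{
    static void Main()
    {
        // Compresses everything under the source folder into a new archive.
        ZipFile.CreateFromDirectory(@"C:\data\input", @"C:\data\input.zip");
    }
}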
public static FileData GetFileData(string filepath) { FileData result = new FileData(); //Basic Info result.FilePath = filepath; result.FileName = Path.GetFileNameWithoutExtension(filepath); result.FileNameWithExt = Path.GetFileName(filepath); if (CheckXCI(filepath)) { //Get File Size GetFileSize(ref result); //Load Deep File Info (Probably we should clean it a bit more) string actualHash; byte[] hashBuffer; long offset; long[] SecureSize = { }; long[] NormalSize = { }; long[] SecureOffset = { }; long[] NormalOffset = { }; long gameNcaOffset = -1; long gameNcaSize = -1; long PFS0Offset = -1; long PFS0Size = -1; FileStream fileStream = new FileStream(filepath, FileMode.Open, FileAccess.Read); HFS0.HSF0_Entry[] array = new HFS0.HSF0_Entry[HFS0.HFS0_Headers[0].FileCount]; fileStream.Position = XCI.XCI_Headers[0].HFS0OffsetPartition + 16 + 64 * HFS0.HFS0_Headers[0].FileCount; List <char> chars = new List <char>(); long num = XCI.XCI_Headers[0].HFS0OffsetPartition + XCI.XCI_Headers[0].HFS0SizeParition; byte[] array2 = new byte[64]; byte[] array3 = new byte[16]; byte[] array4 = new byte[24]; for (int i = 0; i < HFS0.HFS0_Headers[0].FileCount; i++) { fileStream.Position = XCI.XCI_Headers[0].HFS0OffsetPartition + 16 + 64 * i; fileStream.Read(array2, 0, 64); array[i] = new HFS0.HSF0_Entry(array2); fileStream.Position = XCI.XCI_Headers[0].HFS0OffsetPartition + 16 + 64 * HFS0.HFS0_Headers[0].FileCount + array[i].Name_ptr; int num2; while ((num2 = fileStream.ReadByte()) != 0 && num2 != 0) { chars.Add((char)num2); } array[i].Name = new string(chars.ToArray()); chars.Clear(); offset = num + array[i].Offset; hashBuffer = new byte[array[i].HashedRegionSize]; fileStream.Position = offset; fileStream.Read(hashBuffer, 0, array[i].HashedRegionSize); actualHash = SHA256Bytes(hashBuffer); HFS0.HFS0_Header[] array5 = new HFS0.HFS0_Header[1]; fileStream.Position = array[i].Offset + num; fileStream.Read(array3, 0, 16); array5[0] = new HFS0.HFS0_Header(array3); if (array[i].Name == "secure") { SecureSize = new long[array5[0].FileCount]; SecureOffset = new long[array5[0].FileCount]; } if (array[i].Name == "normal") { NormalSize = new long[array5[0].FileCount]; NormalOffset = new long[array5[0].FileCount]; } HFS0.HSF0_Entry[] array6 = new HFS0.HSF0_Entry[array5[0].FileCount]; for (int j = 0; j < array5[0].FileCount; j++) { fileStream.Position = array[i].Offset + num + 16 + 64 * j; fileStream.Read(array2, 0, 64); array6[j] = new HFS0.HSF0_Entry(array2); fileStream.Position = array[i].Offset + num + 16 + 64 * array5[0].FileCount + array6[j].Name_ptr; if (array[i].Name == "secure") { SecureSize[j] = array6[j].Size; SecureOffset[j] = array[i].Offset + array6[j].Offset + num + 16 + array5[0].StringTableSize + array5[0].FileCount * 64; } if (array[i].Name == "normal") { NormalSize[j] = array6[j].Size; NormalOffset[j] = array[i].Offset + array6[j].Offset + num + 16 + array5[0].StringTableSize + array5[0].FileCount * 64; } while ((num2 = fileStream.ReadByte()) != 0 && num2 != 0) { chars.Add((char)num2); } array6[j].Name = new string(chars.ToArray()); chars.Clear(); offset = array[i].Offset + array6[j].Offset + num + 16 + array5[0].StringTableSize + array5[0].FileCount * 64; hashBuffer = new byte[array6[j].HashedRegionSize]; fileStream.Position = offset; fileStream.Read(hashBuffer, 0, array6[j].HashedRegionSize); actualHash = SHA256Bytes(hashBuffer); } } long num3 = -9223372036854775808L; for (int k = 0; k < SecureSize.Length; k++) { if (SecureSize[k] > num3) { gameNcaSize = SecureSize[k]; gameNcaOffset = SecureOffset[k]; 
num3 = SecureSize[k]; } } PFS0Offset = gameNcaOffset + 32768; fileStream.Position = PFS0Offset; fileStream.Read(array3, 0, 16); PFS0.PFS0_Headers[0] = new PFS0.PFS0_Header(array3); PFS0.PFS0_Entry[] array8; array8 = new PFS0.PFS0_Entry[PFS0.PFS0_Headers[0].FileCount]; for (int m = 0; m < PFS0.PFS0_Headers[0].FileCount; m++) { fileStream.Position = PFS0Offset + 16 + 24 * m; fileStream.Read(array4, 0, 24); array8[m] = new PFS0.PFS0_Entry(array4); PFS0Size += array8[m].Size; } for (int n = 0; n < PFS0.PFS0_Headers[0].FileCount; n++) { fileStream.Position = PFS0Offset + 16 + 24 * PFS0.PFS0_Headers[0].FileCount + array8[n].Name_ptr; int num4; while ((num4 = fileStream.ReadByte()) != 0 && num4 != 0) { chars.Add((char)num4); } array8[n].Name = new string(chars.ToArray()); chars.Clear(); } fileStream.Close(); NCA.NCA_Headers[0] = new NCA.NCA_Header(DecryptNCAHeader(filepath, gameNcaOffset)); result.TitleID = NCA.NCA_Headers[0].TitleID.ToString("X"); } return(result); }
public async Task <Attachment> UploadAttachment(FileData fileData) { var UploadedAttachment = await DependencyService.Get <IFirebaseStorage>().UploadFile(fileData); return(UploadedAttachment); }
private static bool?IsFileMatch(MapCriteria criteria, NetworkMessageInfo source, FileData file) { string sourceValue; switch (criteria.SourceType) { case MappingDataType.FileValue: sourceValue = source.Source.FindFileValue(criteria.SourceName); break; case MappingDataType.EventValue: sourceValue = source.Source.FindEventValue(criteria.SourceName); break; default: throw new ArgumentOutOfRangeException(); } if (sourceValue == null) { return(null); } switch (criteria.SourceName.ToUpper()) { // Condition DateTime case Keywords.DATETIME_UPPER: DateTime sourceDateTime; if (DateTimeExt.TryParseWithTimeZoneRemoval(sourceValue, out sourceDateTime)) { return(sourceDateTime >= file.Start && sourceDateTime <= file.End); } return(false); default: switch (criteria.Operator) { case Keywords.EQUAL: return(sourceValue == file.FindFileValue(criteria.TargetName)); case Keywords.NOT_EQUAL: return(sourceValue != file.FindFileValue(criteria.TargetName)); } break; } return(null); }
/// <summary>
/// Function to enumerate the files and directories from a physical location and map it to a virtual location.
/// </summary>
/// <param name="physicalLocation">The physical location containing files and directories to enumerate.</param>
/// <param name="mountPoint">A <see cref="IGorgonVirtualDirectory"/> that the directories and files from the physical file system will be mounted into.</param>
/// <returns>A <see cref="GorgonPhysicalFileSystemData"/> object containing information about the directories and files contained within the physical file system.</returns>
/// <exception cref="ArgumentNullException">Thrown when the <paramref name="physicalLocation"/>, or the <paramref name="mountPoint"/> parameters are <b>null</b>.</exception>
/// <exception cref="ArgumentEmptyException">Thrown when the <paramref name="physicalLocation"/> parameter is empty.</exception>
/// <remarks>
/// Since this provider holds data in its own block of memory, there's nothing to enumerate when the provider is loaded. Thus, this will always return empty data.
/// </remarks>
protected virtual GorgonPhysicalFileSystemData OnEnumerate(string physicalLocation, IGorgonVirtualDirectory mountPoint) =>
    new GorgonPhysicalFileSystemData(FileData.GetDirectories(),
                                     FileData.GetFileInfos()
                                             .Select(item => new PhysicalFileInfo(Prefix + "::" + item.FullPath, item.CreateDate, item.Size, item.FullPath, 0, item.LastModified))
                                             .ToArray());
private void toolStripLoadRace_Click(object sender, EventArgs e)
{
    runRace.RunnerList.Clear();
    runRace.RunnerList = FileData.LoadRunnerList(pathfile);
    // Reuse the list that was just loaded instead of reading the file a second time.
    dgwRunner.DataSource = runRace.RunnerList.Values.ToList<Runner>();
}
/// <summary> /// Returns a list of NetworkMessageInfoModels that map to the Source NetworkMessageInfo for the given Target FileData /// </summary> public static IEnumerable <NetworkMessageInfoModel> GetNetworkMessageByTargetFile(LineArgs sourceArgs, NetworkMessageInfo sourceMsg, FileData targetFile) { try { sourceArgs.StatusUpdate(StatusModel.StartStopWatch); sourceArgs.StatusUpdate(StatusModel.Update("Linking Network Messages", "")); lock (targetFile.Network) { var stopwatch = new Stopwatch(MethodBase.GetCurrentMethod().Name); try { var result = new List <NetworkMessageInfoModel>(); foreach (var map in GetNetworkMapsByLine(sourceArgs.Line).OrderBy(n => n.Priority)) { if (result.Count > 0 && map.OnlyUseFallThrough) { continue; } // If there is NO EventPattern Value(s) criteria, then add all Network Messages for the given FileData if (!map.Criteria.Exists(n => n.TargetType == MappingDataType.EventValue && n.Enabled)) { foreach (var message in targetFile.Network.NetworkMessages) { AddMessage(result, message, map); } } else { foreach (var targetMsg in targetFile.Network.NetworkMessages) { bool?success = null; foreach (var criteria in map.Criteria.Where(n => n.TargetType == MappingDataType.EventValue && n.Enabled)) { success = IsMatch(criteria, sourceMsg, targetMsg, targetFile.Path).IsMatch; if (success != null && (bool)!success) { break; } } if (success != null && (bool)success) { AddMessage(result, targetMsg, map); } } } } return(result); } finally { stopwatch.Stop(500); } } } finally { sourceArgs.StatusUpdate(StatusModel.Completed); } }
public FileDetailsModel(FileData fileData) { Sha256Hash = fileData.Hashes.First(x => x.Algorithm == "sha-256").Value; _fileContent = fileData.Contents; _decodedContents = new Lazy <string>(DecodeContents); }
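For reference, a "sha-256" value like the one looked up above can be produced from raw content with the BCL. The hex encoding here is an assumption about the upstream format, not something the snippet confirms:

using System;
using System.Security.Cryptography;

static class HashText
{
    // Computes the SHA-256 digest of the content and renders it as a lowercase hex string.
    public static string Sha256Hex(byte[] content)
    {
        using (var sha = SHA256.Create())
        {
            return BitConverter.ToString(sha.ComputeHash(content)).Replace("-", "").ToLowerInvariant();
        }
    }
}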
private static void AssertAreNotEqual(FileData fileData1, FileType fileType1, string fileContents1, FileData fileData2, FileType fileType2, string fileContents2) { Assert.AreNotEqual(fileData1.Id, fileData2.Id); AssertFileData(fileData1, fileType1, fileContents1, fileData2, fileType2, fileContents2); }
public CrossPlatformFile(FileData data) { this.data = data; }
private void ViewDocumentAsHtml(ViewDocumentParameters request, ViewDocumentResponse result, string fileName) { var docInfo = _htmlHandler.GetDocumentInfo(request.Path); var maxWidth = 0; var maxHeight = 0; foreach (var pageData in docInfo.Pages) { if (pageData.Height > maxHeight) { maxHeight = pageData.Height; maxWidth = pageData.Width; } } var fileData = new FileData { DateCreated = DateTime.Now, DateModified = docInfo.LastModificationDate, PageCount = docInfo.Pages.Count, Pages = docInfo.Pages, MaxWidth = maxWidth, MaxHeight = maxHeight }; var htmlOptions = new HtmlOptions() { IsResourcesEmbedded = false, HtmlResourcePrefix = string.Format( "/document-viewer/GetResourceForHtml?documentPath={0}", fileName) + "&pageNumber={page-number}&resourceName=", Watermark = Utils.GetWatermark(request.WatermarkText, request.WatermarkColor, request.WatermarkPosition, request.WatermarkWidth, request.WatermarkOpacity), }; if (request.PreloadPagesCount.HasValue && request.PreloadPagesCount.Value > 0) { htmlOptions.PageNumber = 1; htmlOptions.CountPagesToConvert = request.PreloadPagesCount.Value; } ///// List <string> cssList; var htmlPages = GetHtmlPages(fileName, htmlOptions, out cssList); foreach (AttachmentBase attachment in docInfo.Attachments) { var attachmentPath = _tempPath + "\\" + Path.GetFileNameWithoutExtension(docInfo.Guid) + Path.GetExtension(docInfo.Guid).Replace(".", "_") + "\\attachments\\" + attachment.Name; var attachmentHtmlOptions = new HtmlOptions() { IsResourcesEmbedded = Utils.IsImage(fileName), HtmlResourcePrefix = string.Format("/document-viewer/GetResourceForHtml?documentPath={0}", HttpUtility.UrlEncode(attachmentPath)) + "&pageNumber={page-number}&resourceName=", }; List <PageHtml> pages = _htmlHandler.GetPages(attachment, attachmentHtmlOptions); var attachmentInfo = _htmlHandler.GetDocumentInfo(attachmentPath); fileData.PageCount += attachmentInfo.Pages.Count; fileData.Pages.AddRange(attachmentInfo.Pages); List <string> attachmentCSSList; var attachmentPages = GetHtmlPages(attachmentInfo.Guid, attachmentHtmlOptions, out attachmentCSSList); cssList.AddRange(attachmentCSSList); htmlPages.AddRange(attachmentPages); } ///// result.documentDescription = new FileDataJsonSerializer(fileData, new FileDataOptions()).Serialize(false); result.docType = docInfo.DocumentType; result.fileType = docInfo.FileType; result.pageHtml = htmlPages.Select(_ => _.HtmlContent).ToArray(); result.pageCss = new[] { string.Join(" ", cssList) }; }
private void LoadData(FileData dataFile, FileData fieldMapsFile) { using (FileStream fieldMapsFileStream = new FileStream(workingDirectory + @"\fieldMapsFile.xlsx", FileMode.Create)) { try { fieldMapsFile.SaveToStream(fieldMapsFileStream); fieldMapsFileStream.Write(fieldMapsFile.Content, 0, fieldMapsFile.Content.Length); fieldMapsFileStream.Close(); } catch (IOException ex) { } } using (FileStream dataFileStream = new FileStream(workingDirectory + @"\dataFile.xlsx", FileMode.Create)) { try { dataFile.SaveToStream(dataFileStream); dataFileStream.Write(dataFile.Content, 0, dataFile.Content.Length); dataFileStream.Close(); } catch (IOException ex) { } } using (ExcelEngine excelEngine = new ExcelEngine()) { IWorkbook fieldMapsWorkbook = excelEngine.Excel.Workbooks.Open(workingDirectory + @"\fieldMapsFile.xlsx"); IWorksheet fieldMapsSheet = fieldMapsWorkbook.Worksheets[0]; IRange fieldMapsRange = fieldMapsSheet.UsedRange; int numberOfFieldMapsRows = fieldMapsRange.Rows.Length; IWorkbook dataWorkbook = excelEngine.Excel.Workbooks.Open(workingDirectory + @"\dataFile.xlsx"); IWorksheet dataSheet = dataWorkbook.Worksheets[0]; IRange dataRange = dataSheet.UsedRange; int numberODatafRows = dataRange.Rows.Length; string dataFileTabName = dataSheet.Name; List <FieldMap> fieldMapList = new List <FieldMap>(); for (int i = 1; i < numberOfFieldMapsRows; i++) //build a list of Field Maps { FieldMap fieldMap = new FieldMap(); if (fieldMapsSheet.Rows[i].Cells[0].Value != "") { fieldMap.ExcelTabName = fieldMapsSheet.Rows[i].Cells[0].Value; } if (fieldMapsSheet.Rows[i].Cells[1].Value != "") { fieldMap.ExcelColumnName = fieldMapsSheet.Rows[i].Cells[1].Value; } if (fieldMapsSheet.Rows[i].Cells[2].Value != "") { fieldMap.ExcelColumnPosition = Convert.ToInt32(fieldMapsSheet.Rows[i].Cells[2].Value); } if (fieldMapsSheet.Rows[i].Cells[4].Value != "") { fieldMap.FieldName = fieldMapsSheet.Rows[i].Cells[4].Value; } fieldMapList.Add(fieldMap); } DoLoad(fieldMapList, numberODatafRows, dataSheet); } }
public static async Task UpdateModifications() { //delete and move var testArchive = await StreamHelpers.CreateTempCopyStream(ZipTest.zfile("normal.zip")); using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true)) { ZipArchiveEntry toBeDeleted = archive.GetEntry("binary.wmv"); toBeDeleted.Delete(); toBeDeleted.Delete(); //delete twice should be okay ZipArchiveEntry moved = archive.CreateEntry("notempty/secondnewname.txt"); ZipArchiveEntry orig = archive.GetEntry("notempty/second.txt"); using (Stream origMoved = orig.Open(), movedStream = moved.Open()) { origMoved.CopyTo(movedStream); } moved.LastWriteTime = orig.LastWriteTime; orig.Delete(); } ZipTest.IsZipSameAsDir(testArchive, ZipTest.zmodified("deleteMove"), ZipArchiveMode.Read, false, false); //append testArchive = await StreamHelpers.CreateTempCopyStream(ZipTest.zfile("normal.zip")); using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true)) { ZipArchiveEntry e = archive.GetEntry("first.txt"); using (StreamWriter s = new StreamWriter(e.Open())) { s.BaseStream.Seek(0, SeekOrigin.End); s.Write("\r\n\r\nThe answer my friend, is blowin' in the wind."); } e.LastWriteTime = new DateTimeOffset(2010, 7, 7, 11, 57, 18, new TimeSpan(-7, 0, 0)); } ZipTest.IsZipSameAsDir(testArchive, ZipTest.zmodified("append"), ZipArchiveMode.Read, false, false); //Overwrite file testArchive = await StreamHelpers.CreateTempCopyStream(ZipTest.zfile("normal.zip")); using (ZipArchive archive = new ZipArchive(testArchive, ZipArchiveMode.Update, true)) { String fileName = ZipTest.zmodified(Path.Combine("overwrite", "first.txt")); ZipArchiveEntry e = archive.GetEntry("first.txt"); var file = FileData.GetFile(fileName); e.LastWriteTime = file.LastModifiedDate; using (var stream = await StreamHelpers.CreateTempCopyStream(fileName)) { using (Stream es = e.Open()) { es.SetLength(0); stream.CopyTo(es); } } } ZipTest.IsZipSameAsDir(testArchive, ZipTest.zmodified("overwrite"), ZipArchiveMode.Read, false, false); }
/// <summary> /// Uses Deserialization to capture the desired file data /// </summary> /// <param name="table"></param> /// <returns></returns> public FileData DeserializeFileData(string table) { int _IdChoice; string _dirPath = ""; string _path; FileData _fileData = new FileData(); #region _path Declaration if (table == "ManualTx") { _dirPath = _manual; } if (table == "PendingTx") { _dirPath = _pending; } if (table == "ProcessedTx") { _dirPath = _processed; } if (table == "Extensioninfo") { _dirPath = _extensions; } if (_dirPath == "") { return(null); } #endregion List <string> _files = Directory.EnumerateFiles(_dirPath).ToList(); PrintFileName(); string _parseStr = Console.ReadLine(); if (!int.TryParse(_parseStr, out _IdChoice)) { _IdChoice = -1; } foreach (string file in _files) { _path = _dirPath + $"{table}_ID_{_IdChoice}_Tx.tranx"; if (file == _path) { _fileData = DeserializeFile(file); } } return(_fileData); // //Sub-Method // void PrintFileName() { int index = 1; Console.WriteLine("ID\tFileName"); foreach (string file in _files) { Console.WriteLine($"{index}\t{file.Remove(0, _dirPath.Length)}"); index++; } Console.Write("What file would you like to Deserialize: "); } }
protected override void OnCreate(Bundle savedInstanceState) { base.OnCreate(savedInstanceState); this.RequestWindowFeature(WindowFeatures.NoTitle); this.Window.AddFlags(WindowManagerFlags.Fullscreen | WindowManagerFlags.TurnScreenOn); SetContentView(Resource.Layout.Settings); var items = new List <int>() { 0, 1, 2, 3, 4, 5, 6 }; var axisAdapter = new ArrayAdapter <int>(this, Android.Resource.Layout.SimpleSpinnerItem, items); var spinner = FindViewById <Spinner>(Resource.Id.lxSpinner); spinner.Adapter = axisAdapter; spinner.SetSelection(Preferences.lxAxis); spinner.ItemSelected += (sender, args) => { Preferences.lxAxis = args.Position; Preferences.save(); }; spinner = FindViewById <Spinner>(Resource.Id.lySpinner); spinner.Adapter = axisAdapter; spinner.SetSelection(Preferences.lyAxis); spinner.ItemSelected += (sender, args) => { Preferences.lyAxis = args.Position; Preferences.save(); }; spinner = FindViewById <Spinner>(Resource.Id.rxSpinner); spinner.Adapter = axisAdapter; spinner.SetSelection(Preferences.rxAxis); spinner.ItemSelected += (sender, args) => { Preferences.rxAxis = args.Position; Preferences.save(); }; spinner = FindViewById <Spinner>(Resource.Id.rySpinner); spinner.Adapter = axisAdapter; spinner.SetSelection(Preferences.ryAxis); spinner.ItemSelected += (sender, args) => { Preferences.ryAxis = args.Position; Preferences.save(); }; var joyItems = new List <string>() { "Generic", "PS3/PS4" }; var joyAdapter = new ArrayAdapter <string>(this, Android.Resource.Layout.SimpleSpinnerItem, joyItems); var joyTypeSpinner = FindViewById <Spinner>(Resource.Id.joystickTypeSpinner); joyTypeSpinner.Adapter = joyAdapter; joyTypeSpinner.SetSelection(Preferences.joyType); joyTypeSpinner.ItemSelected += (sender, args) => { Preferences.setJoyType(args.Position); Preferences.save(); }; var onScreenJoySwitch = FindViewById <Switch>(Resource.Id.onScreenJoySwitch); onScreenJoySwitch.Checked = Preferences.onScreenJoy; onScreenJoySwitch.CheckedChange += (sender, args) => { Preferences.onScreenJoy = args.IsChecked; Preferences.save(); }; var evItems = new List <double>() { -3.0, -2.7, -2.3, -2.0, -1.7, -1.3, -1.0, -0.7, -0.3, 0, 0.3, 0.7, 1.0, 1.3, 1.7, 2.0, 2.3, 2.7, 3.0 }; var evAdapter = new ArrayAdapter <double>(this, Android.Resource.Layout.SimpleSpinnerItem, evItems); var evSpinner = FindViewById <Spinner>(Resource.Id.exposureSpinner); evSpinner.Adapter = evAdapter; evSpinner.SetSelection(Preferences.exposure); evSpinner.ItemSelected += (sender, args) => { Preferences.exposure = args.Position; Tello.setEV(Preferences.exposure); Preferences.save(); }; var vbrItems = new List <string>() { "Auto", "1M", "1.5M", "2M", "3M", "4M" }; var vbrAdapter = new ArrayAdapter <string>(this, Android.Resource.Layout.SimpleSpinnerItem, vbrItems); var vbrSpinner = FindViewById <Spinner>(Resource.Id.vbrSpinner); vbrSpinner.Adapter = vbrAdapter; vbrSpinner.SetSelection(Preferences.videoBitRate); vbrSpinner.ItemSelected += (sender, args) => { Preferences.videoBitRate = args.Position; Tello.setVideoBitRate(Preferences.videoBitRate); Preferences.save(); }; //2,5,10,20,40 var iframeRateItems = new List <string>() { "10/s", "4/s", "2/s", "1/s", "0.5/s" }; var iframeRateAdapter = new ArrayAdapter <string>(this, Android.Resource.Layout.SimpleSpinnerItem, iframeRateItems); var iframeRateSpinner = FindViewById <Spinner>(Resource.Id.iframeRateSpinner); iframeRateSpinner.Adapter = iframeRateAdapter; switch (Preferences.iFrameRate) { case 2: iframeRateSpinner.SetSelection(0); break; case 5: iframeRateSpinner.SetSelection(1); break; 
case 10: iframeRateSpinner.SetSelection(2); break; case 20: iframeRateSpinner.SetSelection(3); break; case 40: iframeRateSpinner.SetSelection(4); break; } iframeRateSpinner.ItemSelected += (sender, args) => { switch (args.Position) { case 0: Preferences.iFrameRate = 2; break; case 1: Preferences.iFrameRate = 5; break; case 2: Preferences.iFrameRate = 10; break; case 3: Preferences.iFrameRate = 20; break; case 4: Preferences.iFrameRate = 40; break; } Tello.iFrameRate = Preferences.iFrameRate; Preferences.save(); }; var cacheVideoSwitch = FindViewById<Switch>(Resource.Id.cacheVideoSwitch); cacheVideoSwitch.Checked = Preferences.cacheVideo; cacheVideoSwitch.CheckedChange += (sender, args) => { Preferences.cacheVideo = args.IsChecked; Preferences.save(); }; var photoQualitySwitch = FindViewById<Switch>(Resource.Id.photoQualitySwitch); photoQualitySwitch.Checked = Preferences.jpgQuality > 0; photoQualitySwitch.CheckedChange += (sender, args) => { Preferences.jpgQuality = args.IsChecked ? 1 : 0; Preferences.save(); Tello.setJpgQuality(Preferences.jpgQuality); }; //Convert single video button Button convertVideoButton = FindViewById<Button>(Resource.Id.convertVideoButton); convertVideoButton.Click += async delegate { if (Tello.connected && Tello.state.flying) { return; //Don't allow converting while flying. } try { FileData fileData = await CrossFilePicker.Current.PickFile(); if (fileData == null) { return; // user canceled file picking } string fileName = fileData.FileName; //string contents = System.Text.Encoding.UTF8.GetString(fileData.DataArray); Console.WriteLine(fileData.FilePath); System.Console.WriteLine("File name chosen: " + fileName); //System.Console.WriteLine("File data: " + contents); RunOnUiThread(async () => { try { if (!fileName.EndsWith(".h264", StringComparison.OrdinalIgnoreCase)) { Toast.MakeText(Application.Context, "Error. Can only convert .h264 files", ToastLength.Long).Show(); return; } var videoConverter = new aTello.VideoConverter(); var result = await videoConverter.ConvertFileAsync(this, new Java.IO.File(fileData.FilePath)); Toast.MakeText(Application.Context, "Video Conversion. Result:" + result, ToastLength.Long).Show(); } catch (Exception ex) { Toast.MakeText(Application.Context, "Video Conversion. FAIL:" + ex.Message, ToastLength.Long).Show(); } }); } catch (Exception ex) { System.Console.WriteLine("Exception choosing file: " + ex.ToString()); } }; //Convert all videos button Button convertAllVideoButton = FindViewById<Button>(Resource.Id.convertAllVideoButton); convertAllVideoButton.Click += delegate { if (Tello.connected && Tello.state.flying) { return; //Don't allow converting while flying. } var path = Path.Combine(Android.OS.Environment.ExternalStorageDirectory.Path, "aTello/video/"); Java.IO.File f = new Java.IO.File(path); var files = f.ListFiles().ToList(); //append cache files to the list. path = Path.Combine(Android.OS.Environment.ExternalStorageDirectory.Path, "aTello/video/cache"); f = new Java.IO.File(path); files.AddRange(f.ListFiles()); foreach (Java.IO.File inFile in files) { if (!inFile.IsDirectory && inFile.Name.EndsWith(".h264")) { RunOnUiThread(async () => { var videoConverter = new aTello.VideoConverter(); var inF = new Java.IO.File(inFile.Path); var result = await videoConverter.ConvertFileAsync(this, inF); Toast.MakeText(Application.Context, "Video Converted. Result:" + result, ToastLength.Long).Show(); if (result.StartsWith("Success")) { inF.Delete(); } }); } } }; //Share last photo button Button sharePhotoButton = FindViewById<Button>(Resource.Id.sharePhotoButton); sharePhotoButton.Click += delegate { if (Tello.connected && Tello.state.flying) { return; //Don't allow sharing while flying. } var uri = Android.Net.Uri.FromFile(new Java.IO.File(Tello.picPath)); shareImage(uri); }; //EditText text = FindViewById<EditText>(Resource.Id.maxHeightText); //text.AfterTextChanged += delegate { // Tello.setMaxHeight(int.Parse(text.Text)); //}; //text = FindViewById<EditText>(Resource.Id.exposureText); //text.AfterTextChanged += delegate { // Tello.setEV(int.Parse(text.Text)); //}; //text = FindViewById<EditText>(Resource.Id.attAngleText); //text.AfterTextChanged += delegate { // Tello.setAttAngle(int.Parse(text.Text)); //}; //text = FindViewById<EditText>(Resource.Id.eisText); //text.AfterTextChanged += delegate { // Tello.setEIS(int.Parse(text.Text)); //}; }
private async Task<MarkdownFileModel> GetFileHandlerModelV2Async(FileHandlerActivationParameters input, bool allowInteractiveLogin = true) { if (input == null) { return MarkdownFileModel.GetErrorModel(new FileHandlerActivationParameters(), "No activation parameters were provided."); } // Retrieve an access token so we can make API calls string accessToken = null; try { accessToken = await AuthHelper.GetUserAccessTokenSilentAsync(input.ResourceId, allowInteractiveLogin); } catch (Exception ex) { return MarkdownFileModel.GetErrorModel(input, ex); } // Check the user's permissions against the API server configured in app settings using (HttpClient client = new HttpClient()) { client.BaseAddress = new Uri(ConfigurationManager.AppSettings["sc:ApiServer"]); client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); client.DefaultRequestHeaders.Add("Api-Key", ConfigurationManager.AppSettings["sc:ApiKey"]); HttpRequestMessage request = new HttpRequestMessage(HttpMethod.Post, "api/v2/office365/isAllowed"); request.Content = new StringContent("{\"email\":\"" + input.UserId + "\"}", Encoding.UTF8, "application/json"); HttpResponseMessage response = await client.SendAsync(request); if (response.StatusCode != HttpStatusCode.OK) { return MarkdownFileModel.GetErrorModel(input, "Access Denied"); } } // Get the file content and save a local copy for later processing FileData results = null; string filepath = ""; try { UriBuilder downloadUrlBuilder = new UriBuilder(input.ItemUrls.First()); downloadUrlBuilder.Path += "/content"; results = await HttpHelper.Default.DownloadFileAsync(downloadUrlBuilder.ToString(), accessToken); // download file filepath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".docx"); using (Stream stream = await HttpHelper.Default.GetStreamContentForUrlAsync(downloadUrlBuilder.ToString(), accessToken)) using (FileStream file = System.IO.File.Create(filepath)) { await stream.CopyToAsync(file); } } catch (Exception ex) { return MarkdownFileModel.GetErrorModel(input, ex); } return MarkdownFileModel.GetWriteableModel(input, results.Filename, results.Content, filepath); }
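// HttpHelper is project-specific and not shown above. A rough sketch of the equivalent download
// step using plain HttpClient, assuming the service accepts a standard OAuth bearer token; the
// real helper may add retries, buffering, or different error handling.
private static async Task<string> DownloadToTempFileAsync(string contentUrl, string accessToken)
{
    using (HttpClient client = new HttpClient())
    using (HttpRequestMessage request = new HttpRequestMessage(HttpMethod.Get, contentUrl))
    {
        request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", accessToken);
        using (HttpResponseMessage response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead))
        {
            response.EnsureSuccessStatusCode();
            string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + ".docx");
            using (Stream source = await response.Content.ReadAsStreamAsync())
            using (FileStream target = File.Create(tempPath))
            {
                await source.CopyToAsync(target);   // stream the body straight to disk
            }
            return tempPath;
        }
    }
}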
//Create the compiled output files public void CreateCompile(bool build) { string[] fileNames; //Get the files to generate if (genConfig != null) { if (genConfig.controllerFiles.Count > 0) { fileNames = new string[genConfig.controllerFiles.Count]; for (int i = 0; i < fileNames.Length; i++) { fileNames[i] = Frame.MapPath(genConfig.controllerFiles[i]); } } else { return; } } else { if (!Directory.Exists(Frame.MapPath(config.Controller))) { return; } //Get all controller .cs files fileNames = Directory.GetFiles(Frame.MapPath(config.Controller), "*.cs", SearchOption.AllDirectories); if (fileNames == null || fileNames.Length < 1) { return; } } //The generated code string compileCode = ""; //Class name string ClassName = ""; //Relative offset of the code that follows, once earlier parts of the string have been inserted or removed int relative_position = 0; if (fileNames.Length > 0) { Compiler com = new Compiler(); FileData fileData = null; ClassData classData = null; MethodData methodData = null; StreamWriter sw = null; VTemplate.Engine.TemplateDocument swDebug = null; //VTemplate.Engine.TemplateDocument swAspx = null; string compileFileName = ""; string compileDir = ""; string debugFileName = ""; string methodContent = ""; for (int i = 0; i < fileNames.Length; i++) { fileData = com.GetFileData(Frame.appRoot, this.config.APP, fileNames[i], System.Text.Encoding.UTF8); ClassName = Path.GetFileNameWithoutExtension(fileNames[i]); classData = fileData.GetClassData(ClassName); if (classData.MethodDataList.Count > 0) { for (int j = 0; j < classData.MethodDataList.Count; j++) { //Only public methods are accessible if (classData.MethodDataList[j].isPublic) { relative_position = 0; compileCode = fileData.csharpCode.ToString(); methodData = classData.MethodDataList[j]; //Relative path compileDir = config.ChangeControllerName(config.Web + config.defaultStyle, (fileData.nameSpace + '.' + ClassName).Replace('.', '/')) + "/"; //Absolute path compileFileName = Frame.MapPath(compileDir + methodData.name + ".cs"); if (!Directory.Exists(Path.GetDirectoryName(compileFileName))) { Directory.CreateDirectory(Path.GetDirectoryName(compileFileName)); } //Debug file debugFileName = Frame.MapPath(compileDir + methodData.name + ".html"); //Write the debug file if it does not exist yet if (!File.Exists(debugFileName)) { swDebug = new VTemplate.Engine.TemplateDocument(Frame.MapPath("/NFinal/Template/Debug.tpl"), System.Text.Encoding.UTF8); swDebug.SetValue("Url", config.ChangeControllerName(config.APP, (fileData.nameSpace + '.' + ClassName).Replace('.', '/')) + "/" + methodData.name + ".htm"); swDebug.RenderTo(debugFileName, System.Text.Encoding.UTF8); } relative_position += Replace(ref compileCode, relative_position + fileData.start, fileData.length, "namespace " + (Frame.AssemblyTitle + compileDir.TrimEnd('/')).Replace('/', '.') + "\r\n{"); relative_position += Replace(ref compileCode, relative_position + classData.start, classData.length, "public class " + methodData.name + "Action " + (string.IsNullOrEmpty(classData.baseName) ? "" : " : " + classData.baseName) + "\r\n\t{" //Add the constructors + "\r\n\t\tpublic " + methodData.name + "Action(System.IO.TextWriter tw):base(tw){}" + "\r\n\t\tpublic " + methodData.name + "Action(string fileName) : base(fileName) {}"); //Loop over every method in the class for (int k = 0; k < classData.MethodDataList.Count; k++) { methodData = classData.MethodDataList[k]; //If this is the current method (or a non-public one), rewrite it in place if (j == k || (!classData.MethodDataList[k].isPublic)) { #region "Replace the original method" //Skip non-public and non-base methods and replace the original method body //if (methodData.isPublic) { methodContent = methodData.Content; SqlCompiler sqlCompiler = new NFinal.Compile.SqlCompiler(); //Parse the database-related function calls out of the code List<DbFunctionData> dbFunctions = sqlCompiler.Compile(com.DeleteComment(methodContent)); if (dbFunctions.Count > 0) { SqlAnalyse analyse = new SqlAnalyse(); //Query the database to extract all table and column information from the SQL statements methodData.dbFunctions = analyse.FillFunctionDataList(NFinal.DB.Coding.DB.DbStore, dbFunctions); } //Replace the database function calls int content_relative_position = 0; string StructDatas = string.Empty; if (dbFunctions.Count > 0) { bool hasSameVarName = false; List<string> varNames = new List<string>(); //Add the struct types for (int s = 0; s < dbFunctions.Count; s++) { //Skip duplicates if (varNames.Count > 0) { hasSameVarName = false; for (int c = 0; c < varNames.Count; c++) { //If a duplicate is found, skip this entry if (varNames[c] == dbFunctions[s].varName) { hasSameVarName = true; break; } } if (hasSameVarName) { continue; } } varNames.Add(dbFunctions[s].varName); //Determine whether the List<dynamic> and dynamic values returned by the SQL use AddNewField(string fileName, Type t); to add the related types NewField newField = new NewField(dbFunctions[s].varName); List<NFinal.Compile.StructField> structFieldList = newField.GetFields(ref methodContent, methodData.name); //Add the struct fields string StructData = sqlCompiler.SetMagicStruct(methodData.name, dbFunctions[s], structFieldList, Frame.appRoot); StructDatas += StructData; if (!string.IsNullOrEmpty(StructData)) { compileCode = compileCode.Insert(methodData.start + relative_position, StructData); relative_position += StructData.Length; } } //Rewrite the database operations inside the method content_relative_position += sqlCompiler.SetMagicFunction(methodData.name, ref methodContent, content_relative_position, methodData.dbFunctions, Frame.appRoot); //Parse and rewrite the connection strings it uses content_relative_position += sqlCompiler.SetMagicConnection(methodData.name, ref methodContent, Frame.appRoot); } if (methodData.parameterTypeAndNames != string.Empty) { relative_position += Replace(ref compileCode, relative_position + methodData.parametersIndex, methodData.parametersLength, methodData.parameterTypeAndNames); } //Parse the view function calls out of the code NFinal.Compile.ViewCompiler viewCompiler = new NFinal.Compile.ViewCompiler(); List<ViewData> views = viewCompiler.Compile(methodContent); //Template replacement if (views.Count > 0) { content_relative_position = 0; content_relative_position = viewCompiler.SetMagicFunction(ref methodContent, content_relative_position, fileData.nameSpace, ClassName, methodData.name, views, config); } if (build) { relative_position += Replace(ref compileCode, relative_position + methodData.position, methodData.Content.Length, methodContent); } else { relative_position += Replace(ref compileCode, relative_position + methodData.position, methodData.Content.Length, string.Empty); } //Generate the auto-completion classes //views, Structs, DBFunctions AutoCompleteCompiler autoComplete = new AutoCompleteCompiler(); autoComplete.Compile(classData.baseName, methodData, StructDatas, views, fileData.nameSpace, ClassName, config); } #endregion } //Otherwise remove the other public methods from the generated code else { compileCode = compileCode.Remove(relative_position + classData.MethodDataList[k].start, classData.MethodDataList[k].length + classData.MethodDataList[k].Content.Length + 1); //drop the trailing } relative_position -= classData.MethodDataList[k].length + classData.MethodDataList[k].Content.Length + 1; } } //Write the auto-completion layer for the aspx page //Write the Web-layer class file sw = new StreamWriter(compileFileName, false, System.Text.Encoding.UTF8); sw.Write(compileCode); sw.Close(); } } } } } }
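// The Replace helper used throughout CreateCompile is not shown above. A minimal sketch of what
// such a helper could look like, assuming it splices `replacement` over `length` characters at
// `start` and returns the resulting length delta so the caller can keep adjusting later offsets
// (the relative_position bookkeeping above).
private static int Replace(ref string code, int start, int length, string replacement)
{
    code = code.Remove(start, length).Insert(start, replacement);   // splice the new text in place
    return replacement.Length - length;                             // how much later offsets shift by
}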
public static Guid? GetGuidOrNull(this FileData data) { return data != null ? (Guid?)data.FileID : null; }
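// Usage sketch for the extension above. getAttachment is a hypothetical stand-in for whatever
// code produces a FileData instance (it may legitimately return null).
static void PrintAttachmentId(Func<FileData> getAttachment)
{
    FileData attachment = getAttachment();              // may be null
    Guid? attachmentId = attachment.GetGuidOrNull();    // the extension handles the null case
    Console.WriteLine(attachmentId.HasValue ? $"Attachment id: {attachmentId.Value}" : "No attachment");
}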
public static void IsZipSameAsDir(Stream archiveFile, string directory, ZipArchiveMode mode, bool requireExplicit, bool checkTimes) { int count = 0; using (ZipArchive archive = new ZipArchive(archiveFile, mode)) { List<FileData> files = FileData.InPath(directory); Assert.All<FileData>(files, (file) => { count++; string entryName = file.FullName; if (file.IsFolder) { entryName += Path.DirectorySeparatorChar; } ZipArchiveEntry entry = archive.GetEntry(entryName); if (entry == null) { entryName = FlipSlashes(entryName); entry = archive.GetEntry(entryName); } if (file.IsFile) { Assert.NotNull(entry); long givenLength = entry.Length; var buffer = new byte[entry.Length]; using (Stream entrystream = entry.Open()) { int totalRead = 0; while (totalRead < buffer.Length) { int bytesRead = entrystream.Read(buffer, totalRead, buffer.Length - totalRead); if (bytesRead == 0) { break; } totalRead += bytesRead; } #if NETCOREAPP uint zipcrc = entry.Crc32; Assert.Equal(CRC.CalculateCRC(buffer), zipcrc); #endif if (file.Length != givenLength) { buffer = NormalizeLineEndings(buffer); } Assert.Equal(file.Length, buffer.Length); ulong crc = CRC.CalculateCRC(buffer); Assert.Equal(file.CRC, crc.ToString()); } if (checkTimes) { const int zipTimestampResolution = 2; // Zip follows the FAT timestamp resolution of two seconds for file records DateTime lower = file.LastModifiedDate.AddSeconds(-zipTimestampResolution); DateTime upper = file.LastModifiedDate.AddSeconds(zipTimestampResolution); Assert.InRange(entry.LastWriteTime.Ticks, lower.Ticks, upper.Ticks); } Assert.Equal(file.Name, entry.Name); Assert.Equal(entryName, entry.FullName); Assert.Equal(entryName, entry.ToString()); Assert.Equal(archive, entry.Archive); } else if (file.IsFolder) { if (entry == null) //entry not found { string entryNameOtherSlash = FlipSlashes(entryName); bool isEmpty = !files.Any( f => f.IsFile && (f.FullName.StartsWith(entryName, StringComparison.OrdinalIgnoreCase) || f.FullName.StartsWith(entryNameOtherSlash, StringComparison.OrdinalIgnoreCase))); if (requireExplicit || isEmpty) { Assert.Contains("emptydir", entryName); } if ((!requireExplicit && !isEmpty) || entryName.Contains("emptydir")) { count--; //discount this entry } } else { using (Stream es = entry.Open()) { try { Assert.Equal(0, es.Length); } catch (NotSupportedException) { try { Assert.Equal(-1, es.ReadByte()); } catch (Exception) { Console.WriteLine("Stream did not return EOF as expected"); throw; } } } } } }); Assert.Equal(count, archive.Entries.Count); } }
/// <summary> /// Writes the header part to an output stream. /// </summary> /// <param name="stream">The stream to write to.</param> /// <param name="offset">The absolute offset in the stream to write at.</param> /// <param name="part8">The data to write.</param> /// <param name="p">Progress info.</param> /// <returns>An async task.</returns> internal async Task WriteHeaderPart8Async(Stream stream, UInt64 offset, NefsHeaderPart8 part8, NefsProgress p) { await FileData.WriteDataAsync(stream, offset, part8, NefsVersion.Version200, p); }