public static byte[] ProcessXslFo(string xslfo, string name)
{
    var foUserAgent = FopFactory.newFOUserAgent();
    foUserAgent.setCreator("Crispin (Apache FOP 2.1 via IKVM)");
    foUserAgent.setTitle(name);

    var outputStream = new java.io.ByteArrayOutputStream();
    var fop = FopFactory.newFop(org.apache.xmlgraphics.util.MimeConstants.__Fields.MIME_PDF, foUserAgent, outputStream);

    var transformerFactory = new com.sun.org.apache.xalan.@internal.xsltc.trax.TransformerFactoryImpl();
    var transformer = transformerFactory.newTransformer();

    var source = new StreamSource(new java.io.StringReader(xslfo));
    var result = new SAXResult(fop.getDefaultHandler());

    transformer.transform(source, result);

    /*
     * Adding the page count requires a second pass. This should be configurable
     * by the report itself.
     *
     * transformer.setParameter("page-count", fop.getResults().getPageCount().ToString());
     * transformer.transform(src, res);
     */

    outputStream.close();
    return outputStream.toByteArray();
}
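// Usage sketch for ProcessXslFo (not part of the original source): render an
// XSL-FO string to PDF bytes and persist them. The file names are hypothetical.
var xslfo = System.IO.File.ReadAllText("report.fo");   // assumed input file
byte[] pdf = ProcessXslFo(xslfo, "Quarterly Report");  // "name" becomes the PDF title
System.IO.File.WriteAllBytes("report.pdf", pdf);       // assumed output path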
private async Task MigrateIndexes()
{
    var response = await RunWithAuthRetry(async () =>
    {
        var url = $"{Options.ServerUrl}/databases/{Options.DatabaseName}/indexes";
        var request = new HttpRequestMessage(HttpMethod.Get, url);
        var responseMessage = await Parameters.HttpClient.SendAsync(request, Parameters.CancelToken.Token);
        return responseMessage;
    });

    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export indexes from server: {Options.ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    using (var responseStream = await response.Content.ReadAsStreamAsync())
    using (var indexesStream = new ArrayStream(responseStream, "Indexes")) // indexes endpoint returns an array
    using (Parameters.Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(indexesStream, context, Parameters.Database))
    {
        var destination = new DatabaseDestination(Parameters.Database);
        var options = new DatabaseSmugglerOptionsServerSide
        {
            RemoveAnalyzers = Options.RemoveAnalyzers,
        };
        var smuggler = new DatabaseSmuggler(Parameters.Database, source, destination, Parameters.Database.Time,
            options, Parameters.Result, Parameters.OnProgress, Parameters.CancelToken.Token);

        smuggler.Execute();
    }
}
/// <summary>
/// Restore CompareExchange, Identities and Subscriptions from the smuggler file when restoring a snapshot.
/// </summary>
/// <param name="onProgress">Callback invoked with operation progress updates.</param>
/// <param name="database">The database being restored.</param>
/// <param name="smugglerFile">Path to the smuggler backup file.</param>
/// <param name="context">The documents operation context to use.</param>
protected async Task RestoreFromSmugglerFile(Action<IOperationProgress> onProgress, DocumentDatabase database, string smugglerFile, DocumentsOperationContext context)
{
    var destination = new DatabaseDestination(database);
    var smugglerOptions = new DatabaseSmugglerOptionsServerSide
    {
        AuthorizationStatus = AuthorizationStatus.DatabaseAdmin,
        OperateOnTypes = DatabaseItemType.CompareExchange | DatabaseItemType.Identities | DatabaseItemType.Subscriptions,
        SkipRevisionCreation = true
    };

    var lastPath = GetSmugglerBackupPath(smugglerFile);

    using (var zip = await GetZipArchiveForSnapshot(lastPath))
    {
        foreach (var entry in zip.Entries)
        {
            if (entry.Name == RestoreSettings.SmugglerValuesFileName)
            {
                using (var input = entry.Open())
                using (var inputStream = GetSnapshotInputStream(input, database.Name))
                using (var uncompressed = new GZipStream(inputStream, CompressionMode.Decompress))
                {
                    var source = new StreamSource(uncompressed, context, database);
                    var smuggler = new Smuggler.Documents.DatabaseSmuggler(database, source, destination,
                        database.Time, smugglerOptions, onProgress: onProgress, token: _operationCancelToken.Token);

                    smuggler.Execute(ensureStepsProcessed: true, isLastFile: true);
                }

                break;
            }
        }
    }
}
private async Task MigrateDocuments(string lastEtag)
{
    var url = $"{ServerUrl}/databases/{DatabaseName}/streams/docs?etag={lastEtag}";
    var request = new HttpRequestMessage(HttpMethod.Get, url);
    var response = await HttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, CancelToken.Token);
    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export documents from server: {ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    using (var responseStream = await response.Content.ReadAsStreamAsync())
    using (Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(responseStream, context, Database))
    {
        var destination = new DatabaseDestination(Database);
        var options = new DatabaseSmugglerOptionsServerSide
        {
            ReadLegacyEtag = true
        };
        var smuggler = new DatabaseSmuggler(Database, source, destination, Database.Time, options, Result, OnProgress, CancelToken.Token);

        // since we will be migrating indexes as a separate task, don't ensureStepsProcessed at this point
        smuggler.Execute(ensureStepsProcessed: false);
    }
}
public string StoreMeta(JObject meta)
{
    var path = $"{_id}/{ArchiveSession.SessionFileName}";

    var ms = new MemoryStream();
    var writer = new StreamWriter(ms);
    var jsonWriter = new JsonTextWriter(writer);
    var serializer = new JsonSerializer();
    serializer.Serialize(jsonWriter, meta);
    jsonWriter.Flush();
    ms.Position = 0;

    _zipFile.BeginUpdate();
    var ss = new StreamSource(ms);
    _zipFile.Add(ss, path, CompressionMethod.Stored);
    _zipFile.CommitUpdate();

    ms.Close();
    return path;
}
public SerializationContext? GetSubContext([CanBeNull] string relativeFile, ContextMode contextMode)
{
    if (relativeFile == null)
    {
        return this;
    }

    if (_childContexts.TryGetValue(relativeFile, out var context))
    {
        return context;
    }

    var streamSource = StreamSource.OpenSideLocation(relativeFile);
    if (streamSource == null)
    {
        return null;
    }

    if (_backgroundStream == null)
    {
        return null;
    }

    context = new SerializationContext(contextMode, streamSource, SerializerMode, _backgroundStream);
    _childContexts[relativeFile] = context;
    return context;
}
private string GetProcArguments(StreamSource streamSource, int procID)
{
    var root = $"{_chunkerConfig.ChunkStorageDir}/{streamSource.Name}/";
    var segmentFilename = root + $"%Y/%m/%d/%H/%M/%s-%%06d-{procID}.ts";
    var m3u8File = root + "index.m3u8";

    return string.Join(" ", new[]
    {
        "-err_detect ignore_err",
        "-reconnect 1",
        //"-reconnect_at_eof 1",
        "-reconnect_streamed 1",
        "-reconnect_delay_max 300",
        "-y -re",
        "-hide_banner",
        "-i " + streamSource.Link,
        "-map 0",
        "-codec:v copy -codec:a copy -c copy",
        "-f hls",
        "-hls_time " + streamSource.ChunkTime,
        "-use_localtime 1 -use_localtime_mkdir 1",
        "-hls_flags second_level_segment_index",
        "-hls_segment_filename " + segmentFilename,
        m3u8File
    });
}
private string GenerateProcArguments(StreamSource streamCfg, int procID, int startID)
{
    var root = _ffmpegCfg.ChunkStorageDir + "/" + streamCfg.Name + "/";
    var segmentFilename = root + $"%Y/%m/%d/%H/%M/%s-%%06d-{procID:0000000}.ts";
    var m3u8File = root + "index.m3u8";

    return string.Join(" ", new[]
    {
        "-err_detect ignore_err",
        "-reconnect 1 -reconnect_at_eof 1",
        "-reconnect_streamed 1 -reconnect_delay_max 300",
        "-y -re",
        "-hide_banner",
        "-i " + streamCfg.Link,
        "-map 0",
        "-start_number " + startID,
        "-codec:v copy -codec:a copy -c copy",
        "-f hls",
        "-hls_time " + streamCfg.ChunkTime,
        "-use_localtime 1 -use_localtime_mkdir 1",
        "-hls_flags second_level_segment_index",
        "-hls_segment_filename " + segmentFilename,
        m3u8File
    });
}
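// Sketch (not part of the original source) of how the generated arguments might be
// handed to ffmpeg. Assumes ffmpeg is on PATH and that streamCfg/procID/startID come
// from the surrounding class; assumes using System.Diagnostics is available.
var args = GenerateProcArguments(streamCfg, procID: 1, startID: 0);
var psi = new System.Diagnostics.ProcessStartInfo
{
    FileName = "ffmpeg",            // assumed to be resolvable on PATH
    Arguments = args,
    UseShellExecute = false,
    RedirectStandardError = true,   // ffmpeg writes its log to stderr
};
using var proc = System.Diagnostics.Process.Start(psi);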
private int GetLastProducedIndex(StreamSource streamCfg)
{
    var chunksRoot = Path.Combine(_ffmpegCfg.ChunkStorageDir, streamCfg.Name);
    if (!Directory.Exists(chunksRoot))
    {
        return -1;
    }

    var files = Directory.GetFiles(chunksRoot, "*.ts", SearchOption.AllDirectories);
    var mostRecent = files.OrderByDescending(File.GetCreationTime).FirstOrDefault();
    if (mostRecent == null)
    {
        return -1;
    }

    // Load the chunk before deleting its file; the original order (delete, then load)
    // would attempt to read a file that no longer exists.
    var mostRecentChunk = ChunkFileLoader.Load(mostRecent);
    File.Delete(mostRecent);
    return mostRecentChunk.index;
}
public virtual void SetUp()
{
    Scheduler = new TaskScheduler();
    TestFile = Path.GetTempFileName();
    Storage = new AlternateStreamSource(TestFile);
}
/// <summary>
/// Supply the instance document to be validated in the form of a Stream
/// </summary>
/// <param name="source">A stream containing the XML document to be parsed
/// and validated.</param>
/// <param name="baseUri">The base URI to be used for resolving any relative
/// references, for example a reference to an <c>xsi:schemaLocation</c></param>
public void SetSource(Stream source, Uri baseUri)
{
    StreamSource ss = new StreamSource(new DotNetInputStream(source));
    ss.setSystemId(baseUri.ToString());
    this.source = ss;
}
public static Device Open(StreamSource source, Stream stream, byte[] buffer)
{
    stream.Position = 0;
    MasterBootRecord record = MasterBootRecord.Read(stream, buffer);
    stream.Close();
    return new Device(source, record);
}
public async Task Documents()
{
    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var stream = new ArrayStream(RequestBodyStream(), "Docs"))
    using (var source = new StreamSource(stream, context, Database))
    {
        var destination = new DatabaseDestination(Database);
        var options = new DatabaseSmugglerOptionsServerSide
        {
            ReadLegacyEtag = true,
            OperateOnTypes = DatabaseItemType.Documents
        };
        var smuggler = new DatabaseSmuggler(Database, source, destination, Database.Time, options);

        var result = smuggler.Execute();

        var replicationSource = GetSourceReplicationInformation(context, GetRemoteServerInstanceId(), out var documentId);
        replicationSource.LastDocumentEtag = result.LegacyLastDocumentEtag;
        replicationSource.Source = GetFromServer();
        replicationSource.LastBatchSize = result.Documents.ReadCount + result.Tombstones.ReadCount;
        replicationSource.LastModified = DateTime.UtcNow;

        await SaveSourceReplicationInformation(replicationSource, context, documentId);
    }
}
public async Task GetImport()
{
    if (HttpContext.Request.Query.ContainsKey("file") == false &&
        HttpContext.Request.Query.ContainsKey("url") == false)
    {
        throw new ArgumentException("'file' or 'url' are mandatory when using GET /smuggler/import");
    }

    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        var options = DatabaseSmugglerOptionsServerSide.Create(HttpContext);

        using (var stream = new GZipStream(new BufferedStream(await GetImportStream(), 128 * Voron.Global.Constants.Size.Kilobyte), CompressionMode.Decompress))
        using (var token = CreateOperationToken())
        using (var source = new StreamSource(stream, context, Database))
        {
            var destination = new DatabaseDestination(Database);
            var smuggler = new DatabaseSmuggler(Database, source, destination, Database.Time, options, token: token.Token);
            var result = smuggler.Execute();
            WriteImportResult(context, result, ResponseBodyStream());
        }
    }
}
protected void WriteDocumentWithAttachment(IDocumentActions documentActions, DocumentsOperationContext context, Stream dataStream, string key, BlittableJsonReaderObject metadata)
{
    using (dataStream)
    {
        var attachment = new DocumentItem.AttachmentStream
        {
            Stream = documentActions.GetTempStream()
        };

        var attachmentDetails = StreamSource.GenerateLegacyAttachmentDetails(context, dataStream, key, metadata, ref attachment);

        var dummyDoc = new DocumentItem
        {
            Document = new Document
            {
                Data = StreamSource.WriteDummyDocumentForAttachment(context, attachmentDetails),
                Id = attachmentDetails.Id,
                ChangeVector = string.Empty,
                Flags = DocumentFlags.HasAttachments,
                NonPersistentFlags = NonPersistentDocumentFlags.FromSmuggler,
                LastModified = _database.Time.GetUtcNow(),
            },
            Attachments = new List<DocumentItem.AttachmentStream>
            {
                attachment
            }
        };

        documentActions.WriteDocument(dummyDoc, Result.Documents);
    }
}
/// <summary>
/// Compile a schema supplied as a Stream. The resulting schema components are added
/// to the cache.
/// </summary>
/// <param name="input">A stream containing the source text of the schema. This method
/// will consume the supplied stream. It is the caller's responsibility to close the stream
/// after use.</param>
/// <param name="baseUri">The base URI of the schema document, for resolving any references to other
/// schema documents</param>
public void Compile(Stream input, Uri baseUri)
{
    StreamSource ss = new StreamSource(new JDotNetInputStream(input));
    ss.setSystemId(baseUri.ToString());
    schemaManager.load(ss);
}
/// <summary>
/// Add an instance document to the list of documents to be validated.
/// </summary>
/// <param name="source">Stream source of the document</param>
/// <param name="baseUri">Base Uri of the source document</param>
public void AddSource(Stream source, Uri baseUri)
{
    StreamSource ss = new StreamSource(new JDotNetInputStream(source));
    ss.setSystemId(baseUri.ToString());
    sources.Add(ss);
}
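// Sketch (not part of the original source) of how the Compile/SetSource wrappers above
// combine into a validate-against-compiled-schema flow. Assumes Saxon's .NET API shape
// (Processor, SchemaManager, SchemaValidator) and hypothetical file names; assumes
// using System; using System.IO; using Saxon.Api.
var processor = new Processor(true);                   // schema-aware processor
var manager = processor.SchemaManager;
using (var xsd = File.OpenRead("schema.xsd"))
{
    manager.Compile(xsd, new Uri("file:///schema.xsd"));
}

var validator = manager.NewSchemaValidator();
using (var doc = File.OpenRead("instance.xml"))
{
    validator.SetSource(doc, new Uri("file:///instance.xml"));
    validator.Run();                                   // throws on validation failure
}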
public Source[] resolve(String targetNamespace, String baseURI, String[] locations)
{
    Uri baseU = (baseURI == null ? null : new Uri(baseURI));
    Uri[] modules = resolver.GetSchemaDocuments(targetNamespace, baseU, locations);
    StreamSource[] ss = new StreamSource[modules.Length];
    for (int i = 0; i < ss.Length; i++)
    {
        ss[i] = new StreamSource();
        ss[i].setSystemId(modules[i].ToString());
        Object doc = resolver.GetEntity(modules[i]);
        if (doc is Stream)
        {
            ss[i].setInputStream(new DotNetInputStream((Stream)doc));
        }
        else if (doc is String)
        {
            ss[i].setReader(new DotNetReader(new StringReader((String)doc)));
        }
        else
        {
            throw new ArgumentException("Invalid response from GetEntity()");
        }
    }
    return ss;
}
/// <summary>
/// Resolve the specified targetNamespace, baseURI and locations.
/// </summary>
/// <param name="targetNamespace">Target namespace.</param>
/// <param name="baseURI">Base URI.</param>
/// <param name="locations">Locations.</param>
public Source[] resolve(String targetNamespace, String baseURI, String[] locations)
{
    bool multipleImports = java.lang.Boolean.valueOf(
        ((java.lang.Object)config.getConfigurationProperty(JFeatureKeys.MULTIPLE_SCHEMA_IMPORTS)).toString()).booleanValue();
    if (config.isSchemaAvailable(targetNamespace) && !multipleImports)
    {
        return new Source[0];
    }

    Uri baseU = (baseURI == null ? null : new Uri(baseURI));
    Uri[] modules = resolver.GetSchemaDocuments(targetNamespace, baseU, locations);
    StreamSource[] ss = new StreamSource[modules.Length];
    for (int i = 0; i < ss.Length; i++)
    {
        ss[i] = new StreamSource();
        ss[i].setSystemId(modules[i].ToString());
        Object doc = resolver.GetEntity(modules[i]);
        if (doc is Stream)
        {
            ss[i].setInputStream(new JDotNetInputStream((Stream)doc));
        }
        else if (doc is String)
        {
            ss[i].setReader(new JDotNetReader(new StringReader((String)doc)));
        }
        else
        {
            throw new ArgumentException("Invalid response from GetEntity()");
        }
    }
    return ss;
}
public SourceTransformation(
    string name,
    StreamSource<T, ISourceFunction<T>> @operator,
    TypeInformation<T> outputType,
    int parallelism)
    : this(name, new SimpleOperatorFactory<T>(@operator), outputType, parallelism)
{
}
public void CloneStreamSource()
{
    // Cloning a stream-backed source is expected to yield null.
    var s = new StreamSource(Stream.Null);
    var c = s.Clone();

    Assert.IsNull(c);
}
public static IVolume OpenVolume(StreamSource source, byte[] buffer)
{
    Stream stream = source(FileAccess.ReadWrite);
    stream.Position = 0;

    VolumeBootRecord record = VolumeBootRecord.Read(stream, buffer);
    if (record is ThfsVolumeBootRecord)
        return new ThfsVolume(source, stream, (ThfsVolumeBootRecord)record);

    throw new Exception("Unknown volume boot record class!");
}
public Task PostCreateSampleData()
{
    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    {
        using (context.OpenReadTransaction())
        {
            foreach (var collection in Database.DocumentsStorage.GetCollections(context))
            {
                if (collection.Count > 0)
                {
                    throw new InvalidOperationException("You cannot create sample data in a database that already contains documents");
                }
            }
        }

        using (var sampleData = typeof(SampleDataHandler).GetTypeInfo().Assembly
            .GetManifestResourceStream("Raven.Server.Web.Studio.EmbeddedData.Northwind_3.5.35168.ravendbdump"))
        using (var stream = new GZipStream(sampleData, CompressionMode.Decompress))
        using (var source = new StreamSource(stream, context, Database))
        {
            var destination = new DatabaseDestination(Database);
            var smuggler = new DatabaseSmuggler(Database, source, destination, Database.Time);
            smuggler.Execute();
        }

        return NoContent();
    }
}
public static List<string> ListResources(StreamSource source, Assembly assembly = null)
{
    if (source == StreamSource.Default)
    {
        throw new InvalidDataException("StreamSource.Default is too ambiguous");
    }

    var result = new List<string>();
    if (source == StreamSource.Local)
    {
        var fileNames = Directory.GetFiles(FolderPath); // Storage.GetFileNames();
        if (fileNames != null && fileNames.Length > 0)
        {
            foreach (var fileName in fileNames)
            {
                result.Add(fileName);
            }
        }
    }
    else if (source == StreamSource.EmbeddedResource)
    {
        var resourceNames = assembly.GetManifestResourceNames();
        foreach (string resourceName in resourceNames)
        {
            result.Add(resourceName);
        }
    }

    return result;
}
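// Usage sketch for ListResources (not part of the original source); assumes
// using System; using System.Reflection.
var embedded = ListResources(StreamSource.EmbeddedResource, Assembly.GetExecutingAssembly());
var local = ListResources(StreamSource.Local);   // the assembly argument is unused for Local
foreach (var name in embedded)
{
    Console.WriteLine(name);
}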
private async Task MigrateIndexes()
{
    var url = $"{ServerUrl}/databases/{DatabaseName}/indexes";
    var request = new HttpRequestMessage(HttpMethod.Get, url);
    var response = await _client.SendAsync(request, CancelToken.Token);
    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export indexes from server: {ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    using (var responseStream = await response.Content.ReadAsStreamAsync())
    // indexes endpoint returns an array
    using (var indexesStream = new IndexesStream(responseStream))
    using (Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(indexesStream, context, Database))
    {
        var destination = new DatabaseDestination(Database);
        var options = new DatabaseSmugglerOptionsServerSide();
        var smuggler = new DatabaseSmuggler(Database, source, destination, Database.Time, options, Result, OnProgress, CancelToken.Token);

        smuggler.Execute();
    }
}
private async Task MigrateDatabase(long operationId, ImportInfo importInfo)
{
    var startDocumentEtag = importInfo?.LastEtag ?? 0;
    var url = $"{ServerUrl}/databases/{DatabaseName}/smuggler/export?operationId={operationId}&startEtag={startDocumentEtag}";
    var json = JsonConvert.SerializeObject(new DatabaseSmugglerOptionsServerSide());
    var content = new StringContent(json, Encoding.UTF8, "application/json");
    var request = new HttpRequestMessage(HttpMethod.Post, url)
    {
        Content = content
    };

    var response = await _client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, CancelToken.Token);
    if (response.IsSuccessStatusCode == false)
    {
        var responseString = await response.Content.ReadAsStringAsync();
        throw new InvalidOperationException($"Failed to export database from server: {ServerUrl}, " +
                                            $"status code: {response.StatusCode}, " +
                                            $"error: {responseString}");
    }

    using (var responseStream = await response.Content.ReadAsStreamAsync())
    using (var stream = new GZipStream(responseStream, mode: CompressionMode.Decompress))
    using (Database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (var source = new StreamSource(stream, context))
    {
        var destination = new DatabaseDestination(Database);
        var options = new DatabaseSmugglerOptionsServerSide();
        var smuggler = new Documents.DatabaseSmuggler(Database, source, destination, Database.Time, options, Result, OnProgress, CancelToken.Token);

        smuggler.Execute();
    }
}
public IAsyncResult BeginCreate(Stream stream, AsyncCallback callback, object state)
{
    var streamSource = new StreamSource
    {
        Stream = stream
    };
    return AsyncMachine<VhdFile>.BeginAsyncMachine(this.CreateAsync, streamSource, callback, state);
}
/// <summary>
/// Compile a schema, retrieving the source using a URI. The resulting schema components are added
/// to the cache.
/// </summary>
/// <remarks>
/// The document located via the URI is parsed using the <c>System.Xml</c> parser.
/// </remarks>
/// <param name="uri">The URI identifying the location where the schema document can be
/// found</param>
public void Compile(Uri uri)
{
    StreamSource ss = new StreamSource(uri.ToString());
    JAugmentedSource aug = JAugmentedSource.makeAugmentedSource(ss);
    aug.setPleaseCloseAfterUse(true);
    schemaManager.load(aug);
}
/// <summary>
/// Create a Processor, based on configuration information supplied in a configuration file.
/// </summary>
/// <remarks>
/// Not fully supported in this release: for experimental use only.
/// </remarks>
/// <param name="configurationFile">A stream holding the text of the XML configuration file. Details of the file format
/// can be found in the Saxon documentation.</param>
public Processor(Stream configurationFile)
{
    StreamSource ss = new StreamSource(new DotNetInputStream(configurationFile));
    config = JConfiguration.readConfiguration(ss);
    config.registerExternalObjectModel(new DotNetObjectModelDefinition());
    config.setProcessor(this);
}
static BrotliDefaultDictionary()
{
    Stream dict = typeof(BrotliDefaultDictionary).GetTypeInfo().Assembly
        .GetManifestResourceStream("BrotliLib.Brotli.Dictionary.Default.dict");
    StreamSource source = new StreamSource(dict);

    DefaultTransforms = Enumerable.Range(0, 121)
        .Select(index => new WordTransform(PrefixMap[index], GetTransformType(index), SuffixMap[index]))
        .ToArray();
    Embedded = new BrotliDefaultDictionary(source);
}
/// <summary>
/// Supply the instance document to be validated in the form of a Uri reference
/// </summary>
/// <param name="uri">URI of the document to be validated</param>
public void SetSource(Uri uri)
{
    StreamSource ss = new StreamSource(uri.ToString());
    AugmentedSource aug = AugmentedSource.makeAugmentedSource(ss);
    aug.setPleaseCloseAfterUse(true);
    this.source = aug;
}
/// <summary>
/// Add an instance document to the list of documents to be validated
/// </summary>
/// <param name="uri">Uri of the source document</param>
public void AddSource(Uri uri)
{
    StreamSource ss = new StreamSource(uri.ToString());
    JAugmentedSource aug = JAugmentedSource.makeAugmentedSource(ss);
    aug.setPleaseCloseAfterUse(true);
    sources.Add(aug);
}
public static void Run()
{
    // ExStart:ColorConversionUsingDefaultProfiles
    // The path to the documents directory.
    string dataDir = RunExamples.GetDataDir_ModifyingAndConvertingImages();

    // Load an existing JPG image
    using (JpegImage image = (JpegImage)Image.Load(dataDir + "aspose-logo_tn.jpg"))
    {
        StreamSource rgbprofile = new StreamSource(File.OpenRead(dataDir + "rgb.icc"));
        StreamSource cmykprofile = new StreamSource(File.OpenRead(dataDir + "cmyk.icc"));
        image.DestinationRgbColorProfile = rgbprofile;
        image.DestinationCmykColorProfile = cmykprofile;
        image.Save(dataDir + "ColorConversionUsingDefaultProfiles_out.jpg");
    }
    // ExEnd:ColorConversionUsingDefaultProfiles
}
public static void Run()
{
    // ExStart:ColorConversionUsingICCProfiles
    // The path to the documents directory.
    string dataDir = RunExamples.GetDataDir_ModifyingAndConvertingImages();

    // Load an existing JPG image
    using (JpegImage image = (JpegImage)Image.Load(dataDir + "aspose-logo_tn.jpg"))
    {
        StreamSource rgbprofile = new StreamSource(File.OpenRead(dataDir + "rgb.icc"));
        StreamSource cmykprofile = new StreamSource(File.OpenRead(dataDir + "cmyk.icc"));
        image.RgbColorProfile = rgbprofile;
        image.CmykColorProfile = cmykprofile;
        Color[] colors = image.LoadPixels(new Rectangle(0, 0, image.Width, image.Height));
    }
    // ExEnd:ColorConversionUsingICCProfiles
}
public static ThfsVolume Open(StreamSource source, Stream stream)
{
    return Open(source, stream, new byte[512]);
}
public static IVolume OpenVolume(StreamSource source)
{
    return OpenVolume(source, new byte[512]);
}
public static ThfsVolume Open(StreamSource source, Stream stream, byte[] buffer)
{
    stream.Position = 0;
    ThfsVolumeBootRecord record = ThfsVolumeBootRecord.Read(stream, buffer);
    return new ThfsVolume(source, stream, record);
}
public ThfsVolume(StreamSource source, Stream stream, ThfsVolumeBootRecord record)
{
    this.vbr = record;
    this.source = source;
    this.stream = stream;

    // Set up the root directory (the partition name and the like all live in the root dir!)
}
/// <summary>
/// Supply the instance document to be validated in the form of a Uri reference
/// </summary>
/// <param name="uri">URI of the document to be validated</param>
public void SetSource(Uri uri)
{
    StreamSource ss = new StreamSource(uri.ToString());
    JAugmentedSource aug = JAugmentedSource.makeAugmentedSource(ss);
    aug.setPleaseCloseAfterUse(true);
    this.source = aug;
}
public Source[] resolve(String targetNamespace, String baseURI, String[] locations)
{
    if (config.isSchemaAvailable(targetNamespace))
    {
        return new Source[0];
    }

    Uri baseU = (baseURI == null ? null : new Uri(baseURI));
    Uri[] modules = resolver.GetSchemaDocuments(targetNamespace, baseU, locations);
    StreamSource[] ss = new StreamSource[modules.Length];
    for (int i = 0; i < ss.Length; i++)
    {
        ss[i] = new StreamSource();
        ss[i].setSystemId(modules[i].ToString());
        Object doc = resolver.GetEntity(modules[i]);
        if (doc is Stream)
        {
            ss[i].setInputStream(new DotNetInputStream((Stream)doc));
        }
        else if (doc is String)
        {
            ss[i].setReader(new DotNetReader(new StringReader((String)doc)));
        }
        else
        {
            throw new ArgumentException("Invalid response from GetEntity()");
        }
    }
    return ss;
}
/// <summary>
/// Compile a schema, retrieving the source using a URI. The resulting schema components are added
/// to the cache.
/// </summary>
/// <remarks>
/// The document located via the URI is parsed using the <c>System.Xml</c> parser.
/// </remarks>
/// <param name="uri">The URI identifying the location where the schema document can be
/// found</param>
public void Compile(Uri uri)
{
    StreamSource ss = new StreamSource(uri.ToString());
    JAugmentedSource aug = JAugmentedSource.makeAugmentedSource(ss);
    aug.setPleaseCloseAfterUse(true);
    if (errorList == null)
    {
        config.addSchemaSource(aug);
    }
    else
    {
        config.addSchemaSource(aug, new ErrorGatherer(errorList));
    }
}
/// <summary>
/// Supply the instance document to be validated in the form of a Stream
/// </summary>
/// <param name="source">A stream containing the XML document to be parsed
/// and validated. This stream will be consumed by the validation process,
/// but it will not be closed after use: that is the responsibility of the
/// caller.</param>
/// <param name="baseUri">The base URI to be used for resolving any relative
/// references, for example a reference to an <c>xsi:schemaLocation</c></param>
public void SetSource(Stream source, Uri baseUri)
{
    StreamSource ss = new StreamSource(new DotNetInputStream(source));
    ss.setSystemId(baseUri.ToString());
    this.source = ss;
}
/// <summary>
/// Load an XML document supplied as raw (lexical) XML on a Stream.
/// </summary>
/// <remarks>
/// <para>The document is parsed using the <c>System.Xml</c> parser.</para>
/// <para>Before calling this method, the BaseUri property must be set to identify the
/// base URI of this document, used for resolving any relative URIs contained within it.</para>
/// <para>Note that the Microsoft <c>System.Xml</c> parser does not report whether attributes are
/// defined in the DTD as being of type <c>ID</c> and <c>IDREF</c>. This is true whether or not
/// DTD-based validation is enabled. This means that such attributes are not accessible to the
/// <c>id()</c> and <c>idref()</c> functions.</para>
/// </remarks>
/// <param name="input">The Stream containing the XML source to be parsed</param>
/// <returns>An <c>XdmNode</c>, the document node at the root of the tree of the resulting
/// in-memory document
/// </returns>
public XdmNode Build(Stream input)
{
    if (baseUri == null)
    {
        throw new ArgumentException("No base URI supplied");
    }

    Source source = new StreamSource(new DotNetInputStream(input));
    source.setSystemId(baseUri.ToString());
    source = augmentSource(source);

    StaticQueryContext env = new StaticQueryContext(config);
    //env.setURIResolver(new DotNetURIResolver(xmlResolver));
    DocumentInfo doc = env.buildDocument(source);
    return (XdmNode)XdmValue.Wrap(doc);
}
/// <summary>
/// Compile a schema supplied as a Stream. The resulting schema components are added
/// to the cache.
/// </summary>
/// <param name="input">A stream containing the source text of the schema. This method
/// will consume the supplied stream. It is the caller's responsibility to close the stream
/// after use.</param>
/// <param name="baseUri">The base URI of the schema document, for resolving any references to other
/// schema documents</param>
public void Compile(Stream input, Uri baseUri)
{
    StreamSource ss = new StreamSource(new DotNetInputStream(input));
    ss.setSystemId(baseUri.ToString());
    if (errorList == null)
    {
        config.addSchemaSource(ss);
    }
    else
    {
        config.addSchemaSource(ss, new ErrorGatherer(errorList));
    }
}
public static Device Open(StreamSource source, byte[] buffer)
{
    return Open(source, source(FileAccess.Read), buffer);
}
public Device(StreamSource source, MasterBootRecord record)
{
    CreateStream = source;
    MasterBootRecord = record;
}
public Task<string> Play(string file, StreamSource source, int index = 0)
{
    if (source == StreamSource.torrent)
    {
        if (string.IsNullOrEmpty(file))
        {
            TtvProxy.Log.Write("Invalid torrent link", TypeMessage.Error);
            throw new Exception("Invalid torrent link");
        }
        file = new Uri(file).ToString();
    }

    string str1 = string.Format("START {0} {1} ", (object)source.ToString().ToUpper(), (object)file);
    string str2 = source != StreamSource.contentid ? str1 + (object)index + " 0 0 0" : str1 + (object)index;

    this._playdTask = new Task<string>(new Func<string>(this.WaytingPlay));
    this._playdTask.Start();
    this.SendMessage(str2.Replace("CONTENTID", "PID"));
    this.PlayedFile = file;
    return this._playdTask;
}
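// Usage sketch for Play (not part of the original source); the engine instance and
// torrent link are hypothetical, and the call assumes an async context.
string reply = await engine.Play(
    "http://example.com/stream.torrent",   // assumed torrent link
    StreamSource.torrent);
Console.WriteLine(reply);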
public static Device Open(StreamSource source, Stream stream)
{
    return Open(source, stream, new byte[512]);
}
/// <summary>
/// Start this from directory \samples\samples\fop\
/// with \samples\lib populated with jni4net.j.jar and jni4net.n.dll
/// and with \samples\samples\fop\lib populated with FOP jar files.
/// </summary>
private static void Main()
{
    FixStartupDirectory();

    // automatically set up the Java classpath to find jni4net.j
    var setup = new BridgeSetup(true);

    // set up the Java classpath to find the FOP libraries
    setup.AddAllJarsClassPath("lib");

    // we don't need to call back from Java
    setup.BindStatic = false;

    // now we create the JVM and bind the jni4net core
    Bridge.CreateJVM(setup);

    // now we bind all proxies of FOP objects
    // which are compiled in this assembly
    Bridge.RegisterAssembly(typeof(Program).Assembly);

    const string inFileName = "data/jni4net.fo";
    const string outFileName = "data/jni4net.pdf";

    // Below is just a plain copy & paste of the FOP basic sample Java code
    OutputStream output = null;
    try
    {
        // Step 1: Construct a FopFactory
        // (reuse if you plan to render multiple documents!)
        FopFactory fopFactory = FopFactory.newInstance();

        // Step 2: Set up output stream.
        output = new BufferedOutputStream(new FileOutputStream(new File(outFileName)));

        // Step 3: Construct fop with desired output format
        Fop fop = fopFactory.newFop(MimeConstants_.MIME_PDF, output);

        // Step 4: Setup JAXP using identity transformer
        TransformerFactory factory = TransformerFactory.newInstance();
        Transformer transformer = factory.newTransformer(); // identity transformer

        // Step 5: Setup input and output for XSLT transformation
        Source src = new StreamSource(new File(inFileName));

        // Resulting SAX events (the generated FO) must be piped through to FOP
        Result res = new SAXResult(fop.getDefaultHandler());

        // Step 6: Start XSLT transformation and FOP processing
        transformer.transform(src, res);
    }
    finally
    {
        // Clean-up
        if (output != null)
        {
            output.close();
        }
    }
}
public static Device Open(StreamSource source)
{
    return Open(source, source(FileAccess.Read));
}
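// From the call sites above (source(FileAccess.Read)), StreamSource in this codebase
// appears to be a delegate from a FileAccess to a Stream. A minimal sketch (not part of
// the original source) of backing a Device with a disk image; the delegate shape and
// file path are assumptions, e.g.: delegate Stream StreamSource(FileAccess access);
StreamSource fromFile = access =>
    new FileStream("disk.img", FileMode.Open, access);   // assumed image path

Device device = Device.Open(fromFile);   // opens the stream and reads the master boot record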