/// <summary>
/// Bundles the inputs handed to a resource transformation callback:
/// the resource being registered, its args, and its options.
/// </summary>
public ResourceTransformationArgs(Resource resource, ResourceArgs args, ResourceOptions options)
{
    // Straight capture; no validation or copying is performed.
    Options = options;
    Args = args;
    Resource = resource;
}
/// <summary>
/// Returns the UPnP wrapper for an F-Spot photo, creating and caching one
/// on first request.
/// </summary>
/// <param name="photo">The F-Spot photo to expose.</param>
/// <param name="parent">The UPnP container the photo belongs to.</param>
/// <returns>The cached or newly created <c>UpnpPhoto</c>.</returns>
UpnpPhoto GetPhoto(FSpotPhoto photo, Container parent)
{
    // Cache hit: single lookup instead of ContainsKey + indexer.
    UpnpPhoto upnp_photo;
    if (photos_cache.TryGetValue(photo.Id, out upnp_photo)) {
        return upnp_photo;
    }

    var resource_options = new ResourceOptions {
        ProtocolInfo = new ProtocolInfo(Protocols.HttpGet,
            MimeTypeHelper.GetMimeType(photo.DefaultVersion.Uri))
    };

    // BUG FIX: the original built resource_uri from `upnp_photo.Id` while
    // `upnp_photo` was still null, throwing NullReferenceException on every
    // cache miss. Allocate the object id first and use it for both the
    // resource URI and the UpnpPhoto itself.
    var photo_id = (id++).ToString();
    var resource_uri = new Uri(string.Format("{0}object?id={1}", prefix, photo_id));

    var photo_options = new PhotoOptions {
        Title = photo.Name,
        Rating = photo.Rating.ToString(),
        Description = photo.Description,
        Resources = new [] { new Resource(resource_uri, resource_options) }
    };

    upnp_photo = new UpnpPhoto(photo_id, parent.Id, photo_options);
    photos_cache.Add(photo.Id, upnp_photo);
    return upnp_photo;
}
/// <summary>
/// Builds out an HttpWebRequest specifically configured for use with the
/// service provider API.
/// </summary>
/// <param name="route">The route for the request.</param>
/// <param name="options">Resource options supplying the HTTP method, content type and optional ETag.</param>
/// <param name="baseRoute">Optional base route; defaults to <c>ServiceProviderApiPath</c> when null.</param>
/// <returns>The configured request.</returns>
private HttpWebRequest GetApiBaseRequest(string route, ResourceOptions options, string baseRoute = null)
{
    var requestRoute = new Uri((baseRoute ?? ServiceProviderApiPath) + route);

    // No auto redirect, no proxy lookup, a 5 second timeout on all
    // operations, and a generic Firefox-looking identity.
    var request = (HttpWebRequest)WebRequest.Create(requestRoute);
    request.Method = options.Method;
    request.Proxy = null;
    request.AllowAutoRedirect = false;
    request.UseDefaultCredentials = false;
    request.CachePolicy = new System.Net.Cache.RequestCachePolicy(System.Net.Cache.RequestCacheLevel.BypassCache);
    request.Timeout = 5000;
    request.ReadWriteTimeout = 5000;
    request.ContentType = options.ContentType;
    request.UserAgent = "Mozilla/5.0 (Windows NT x.y; rv:10.0) Gecko/20100101 Firefox/10.0";
    request.Accept = "application/json,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";

    // NOTE(review): "ETag" is normally a response header; the server here
    // presumably expects it on requests — confirm against the API.
    if (options.ETag != null) {
        request.Headers.Add("ETag", options.ETag);
    }

    return request;
}
/// <summary>
/// Asks the server which of the supplied list hashes are still current.
/// </summary>
/// <param name="hashes">Map of list name to local hash.</param>
/// <returns>
/// Map of list name to verification result, or null when no response body
/// was returned.
/// </returns>
public Dictionary <string, bool?> VerifyLists(Dictionary <string, string> hashes)
{
    // Re-box the string values as objects for the generic parameter bag.
    var parameterBag = new Dictionary <string, object>();
    foreach (var pair in hashes)
    {
        parameterBag[pair.Key] = pair.Value;
    }

    var options = new ResourceOptions
    {
        ContentType = "application/json",
        Parameters = parameterBag
    };

    HttpStatusCode code;
    bool responseReceived;
    var ret = RequestResource(ServiceResource.RuleDataSumCheck, out code, out responseReceived, options);

    if (ret == null)
    {
        m_logger.Warn("No response text returned from {0}. Status code = {1}, Response received = {2}",
            m_namedResourceMap[ServiceResource.RuleDataSumCheck], code, responseReceived);
        return null;
    }

    return Newtonsoft.Json.JsonConvert.DeserializeObject <Dictionary <string, bool?> >(Encoding.UTF8.GetString(ret));
}
/// <summary>
/// Verifies that every option value round-trips through a Resource and
/// back out via GetOptions().
/// </summary>
public void ResourceInstantiation()
{
    var opts = new ResourceOptions();
    SetResourceOptions(opts);

    var res = new Resource(new Uri("http://0"), opts);

    // The resource must mirror both the original options object and a
    // freshly extracted copy.
    AssertResource(res, opts);
    AssertResource(res, res.GetOptions());
}
/// <summary>
/// Downcasts the supplied options to <typeparamref name="OptType"/>,
/// falling back to this format's defaults when the cast fails.
/// </summary>
protected OptType GetOptions <OptType> (ResourceOptions res_options) where OptType : ResourceOptions
{
    var typed = res_options as OptType;
    // Null when res_options is null or of an incompatible type.
    return typed ?? (this.GetDefaultOptions() as OptType);
}
/// <summary>
/// Populates an ObjectOptions with fixed test values, including a single
/// resource built from fully-populated resource options.
/// </summary>
static void SetObjectOptions(ObjectOptions options)
{
    options.IsRestricted = true;
    options.WriteStatus = WriteStatus.Protected;
    options.Creator = "2";
    options.Title = "1";

    var res_opts = new ResourceOptions();
    SetResourceOptions(res_opts);
    options.Resources = new[] { new Resource(new Uri("http://0"), res_opts) };
}
/// <summary>
/// Request a generic resource from the service server(s).
/// Convenience overload that hides the responseReceived out variable.
/// </summary>
/// <param name="resource">The service resource to request.</param>
/// <param name="code">Receives the HTTP status code of the response.</param>
/// <param name="parameters">Optional extra request parameters.</param>
/// <param name="noLogging">Whether errors should be suppressed from the log.</param>
/// <returns>The raw response body, or null on failure.</returns>
public byte[] RequestResource(ServiceResource resource, out HttpStatusCode code, Dictionary <string, object> parameters = null, bool noLogging = false)
{
    var options = new ResourceOptions
    {
        Parameters = parameters,
        NoLogging = noLogging
    };

    // The caller doesn't care about this flag; discard it.
    bool responseReceived;
    return RequestResource(resource, out code, out responseReceived, options);
}
/// <summary>
/// Merges user-supplied options over the SDK defaults and optionally pins
/// the resource ID.
/// </summary>
private static ResourceOptions MakeResourceOptions(ResourceOptions?options, Input <string>?id)
{
    var merged = ResourceOptions.Merge(
        new ResourceOptions { Version = Utilities.Version },
        options);

    // Override the ID if one was specified for consistency with other language SDKs.
    if (id != null)
    {
        merged.Id = id;
    }
    return merged;
}
/// <summary>
/// Asserts that every attribute of the resource matches the corresponding
/// value on the options it was built from.
/// </summary>
static void AssertResource(Resource res, ResourceOptions opts)
{
    Assert.AreEqual(res.Size, opts.Size);
    Assert.AreEqual(res.Duration, opts.Duration);
    Assert.AreEqual(res.BitRate, opts.BitRate);
    Assert.AreEqual(res.SampleFrequency, opts.SampleFrequency);
    Assert.AreEqual(res.BitsPerSample, opts.BitsPerSample);
    Assert.AreEqual(res.NrAudioChannels, opts.NrAudioChannels);
    Assert.AreEqual(res.Resolution, opts.Resolution);
    Assert.AreEqual(res.ColorDepth, opts.ColorDepth);
    Assert.AreEqual(res.ProtocolInfo, opts.ProtocolInfo);
    Assert.AreEqual(res.Protection, opts.Protection);
    Assert.AreEqual(res.ImportUri, opts.ImportUri);
}
/// <summary>
/// Fills a ResourceOptions with distinct, recognizable test values so a
/// later comparison can detect any field that failed to round-trip.
/// </summary>
static void SetResourceOptions(ResourceOptions opts)
{
    opts.Size = 0;
    opts.Duration = new TimeSpan(0, 0, 1);
    opts.BitRate = 2;
    opts.SampleFrequency = 3;
    opts.BitsPerSample = 4;
    opts.NrAudioChannels = 5;
    opts.Resolution = new Resolution(6, 7);
    opts.ColorDepth = 8;
    opts.ProtocolInfo = new ProtocolInfo("foo");
    opts.Protection = "9";
    opts.ImportUri = new Uri("http://10");
}
/// <summary>
/// Merges options2 into options1 in place: scalar values from options2
/// win when present, list-valued options are concatenated.
/// </summary>
internal static void MergeNormalOptions(ResourceOptions options1, ResourceOptions options2)
{
    // Scalars: a non-null value on options2 overrides options1.
    if (options2.Id != null) options1.Id = options2.Id;
    if (options2.Parent != null) options1.Parent = options2.Parent;
    if (options2.Protect != null) options1.Protect = options2.Protect;
    if (options2.Version != null) options1.Version = options2.Version;
    if (options2.Provider != null) options1.Provider = options2.Provider;
    if (options2.CustomTimeouts != null) options1.CustomTimeouts = options2.CustomTimeouts;

    // Collections: append options2's entries after options1's.
    options1.IgnoreChanges.AddRange(options2.IgnoreChanges);
    options1.ResourceTransformations.AddRange(options2.ResourceTransformations);
    options1.Aliases.AddRange(options2.Aliases);
    options1.DependsOn = options1.DependsOn.Concat(options2.DependsOn);
}
/// <summary>
/// Merges options2 into options1 in place: scalar values from options2
/// win when present, list-valued options are concatenated.
/// </summary>
internal static void MergeNormalOptions(ResourceOptions options1, ResourceOptions options2)
{
    // Scalars: a non-null value on options2 overrides options1.
    if (options2.Id != null) options1.Id = options2.Id;
    if (options2.Parent != null) options1.Parent = options2.Parent;
    if (options2.Protect != null) options1.Protect = options2.Protect;
    if (options2.Urn != null) options1.Urn = options2.Urn;
    if (options2.Version != null) options1.Version = options2.Version;
    if (options2.PluginDownloadURL != null) options1.PluginDownloadURL = options2.PluginDownloadURL;
    if (options2.Provider != null) options1.Provider = options2.Provider;
    if (options2.CustomTimeouts != null) options1.CustomTimeouts = options2.CustomTimeouts;
    if (options2.RetainOnDelete != null) options1.RetainOnDelete = options2.RetainOnDelete;

    // Collections: append options2's entries after options1's.
    options1.IgnoreChanges.AddRange(options2.IgnoreChanges);
    options1.ResourceTransformations.AddRange(options2.ResourceTransformations);
    options1.Aliases.AddRange(options2.Aliases);
    options1.ReplaceOnChanges.AddRange(options2.ReplaceOnChanges);
    options1.DependsOn = options1.DependsOn.Concat(options2.DependsOn);
}
/// <summary>
/// OK-button handler: confirms overwrite of an existing archive, captures
/// the selected format's options, and closes the dialog with success.
/// </summary>
void Button_Click(object sender, RoutedEventArgs e)
{
    string arc_name = Path.GetFullPath(ArchiveName.Text);

    // Ask before clobbering an existing file.
    if (File.Exists(arc_name))
    {
        var text = string.Format(guiStrings.MsgOverwrite, arc_name);
        var rc = MessageBox.Show(this, text, guiStrings.TextConfirmOverwrite,
                                 MessageBoxButton.YesNo, MessageBoxImage.Question);
        if (rc != MessageBoxResult.Yes)
            return;
    }

    var format = this.ArchiveFormat.SelectedItem as ArchiveFormat;
    if (format != null)
    {
        ArchiveOptions = format.GetOptions(OptionsWidget.Content);
    }
    DialogResult = true;
}
// TODO: GUI widget for options
/// <summary>
/// Packs the listed files into a zip archive written to <paramref name="output"/>,
/// using the encoding and compression level from the supplied options.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var zip_options = GetOptions <ZipOptions> (options);
    int processed = 0;
    // leaveOpen: true — the caller owns the output stream.
    using (var zip = new ZipArchive(output, ZipArchiveMode.Create, true, zip_options.FileNameEncoding))
    {
        foreach (var entry in list)
        {
            var zip_entry = zip.CreateEntry(entry.Name, zip_options.CompressionLevel);
            using (var input = File.OpenRead(entry.Name))
            using (var zip_stream = zip_entry.Open())
            {
                // Report progress before each file is copied.
                if (callback != null)
                    callback(++processed, entry, arcStrings.MsgAddingFile);
                input.CopyTo(zip_stream);
            }
        }
    }
}
// Writes a YKC001 archive: [0x18-byte header][file data][name table][index].
// The index records point back at both the file data and the name strings.
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    // File data starts right after the 0x18-byte header; header fields are
    // back-filled at the end once the index position is known.
    output.Position = 0x18;
    int callback_count = 0;
    foreach (var entry in list)
    {
        using (var file = File.OpenRead(entry.Name))
        {
            var file_size = file.Length;
            if (file_size > uint.MaxValue) { throw new FileSizeException(); }
            long file_offset = output.Position;
            // Offsets are stored as 32-bit values; reject anything that overflows.
            if (file_offset + file_size > uint.MaxValue) { throw new FileSizeException(); }
            entry.Offset = file_offset;
            entry.Size = (uint)file_size;
            if (null != callback) { callback(callback_count++, entry, arcStrings.MsgAddingFile); }
            file.CopyTo(output);
        }
    }
    if (null != callback) { callback(callback_count++, null, arcStrings.MsgWritingIndex); }

    // Name table: NUL-terminated cp932 names written back to back; remember
    // each name's offset and byte length (incl. terminator) for the index.
    byte[] name_buf = new byte[0x40];
    var encoding = Encodings.cp932.WithFatalFallback();
    int count = list.Count();
    var name_offsets = new uint[count];
    var name_sizes = new int[count];
    int i = 0;
    foreach (var entry in list)
    {
        int length = encoding.GetByteCount(entry.Name);
        // Grow the scratch buffer when a name doesn't fit.
        if (length + 1 > name_buf.Length) { name_buf = new byte[length + 2]; }
        length = encoding.GetBytes(entry.Name, 0, entry.Name.Length, name_buf, 0);
        name_buf[length++] = 0;
        name_offsets[i] = (uint)output.Position;
        output.Write(name_buf, 0, length);
        name_sizes[i] = length;
        ++i;
        if (output.Position > uint.MaxValue) { throw new FileSizeException(); }
    }

    // Index: one 20-byte record per file — name offset, name size,
    // data offset, data size, and a zero dword.
    uint index_offset = (uint)output.Position;
    using (var writer = new BinaryWriter(output, encoding, true))
    {
        i = 0;
        foreach (var entry in list)
        {
            writer.Write(name_offsets[i]);
            writer.Write(name_sizes[i]);
            writer.Write((uint)entry.Offset);
            writer.Write(entry.Size);
            writer.Write(0);
            ++i;
        }
        uint index_length = (uint)(output.Position - index_offset);

        // Back-fill the header: signature + 0x18, then the index
        // offset/length at position 0x10.
        output.Position = 0;
        encoding.GetBytes("YKC001\0\0", 0, 8, name_buf, 0);
        writer.Write(name_buf, 0, 8);
        writer.Write(0x18);
        output.Position = 0x10;
        writer.Write(index_offset);
        writer.Write(index_length);
    }
}
// files inside archive are aligned to 0x10 boundary.
// to convert DateTime structure into entry time:
// entry.FileTime = file_info.CreationTimeUtc.Ticks;
//
// last two bytes of archive is CRC16 of the whole file
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    const long data_offset = 0x10; // index header occupies the first 0x10 bytes
    var encoding = Encodings.cp932.WithFatalFallback();
    int callback_count = 0;

    // First pass: validate and pre-encode every name (0x15-byte base name,
    // 3-byte lower-cased extension, cp932) before any data is written.
    var output_list = new List<OutputEntry> (list.Count());
    foreach (var entry in list)
    {
        try
        {
            string name = Path.GetFileNameWithoutExtension (entry.Name);
            string ext = Path.GetExtension (entry.Name);
            byte[] name_buf = new byte[0x15];
            byte[] ext_buf = new byte[3];
            encoding.GetBytes (name, 0, name.Length, name_buf, 0);
            if (!string.IsNullOrEmpty (ext))
            {
                ext = ext.TrimStart ('.').ToLowerInvariant();
                encoding.GetBytes (ext, 0, ext.Length, ext_buf, 0);
            }
            var out_entry = new OutputEntry
            {
                Name = entry.Name,
                IndexName = name_buf,
                IndexExt = ext_buf,
            };
            output_list.Add (out_entry);
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        catch (ArgumentException X)
        {
            // Thrown by GetBytes when the encoded name overflows the fixed buffers.
            throw new InvalidFileName (entry.Name, arcStrings.MsgFileNameTooLong, X);
        }
    }
    if (null != callback)
        callback (output_list.Count+2, null, null);

    // Second pass: copy file bodies, CRC16-checksumming each one and
    // aligning every entry to a 0x10 boundary.
    output.Position = data_offset;
    uint current_offset = 0;
    foreach (var entry in output_list)
    {
        if (null != callback)
            callback (callback_count++, entry, arcStrings.MsgAddingFile);
        entry.FileTime = File.GetCreationTimeUtc (entry.Name).Ticks;
        entry.Offset = current_offset;
        entry.CompressionType = 0; // entries are stored uncompressed
        using (var input = File.OpenRead (entry.Name))
        {
            var size = input.Length;
            // Guard the 32-bit offset/size fields, including alignment padding.
            if (size > uint.MaxValue || current_offset + size + 0x0f > uint.MaxValue)
                throw new FileSizeException();
            entry.Size = (uint)size;
            entry.UnpackedSize = entry.Size;
            using (var checked_stream = new CheckedStream (output, new Crc16()))
            {
                input.CopyTo (checked_stream);
                entry.HasCheckSum = true;
                entry.CheckSum = (ushort)checked_stream.CheckSumValue;
            }
            // Round the next offset up to the 0x10 boundary.
            current_offset += (uint)size + 0x0f;
            current_offset &= ~0x0fu;
            output.Position = data_offset + current_offset;
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgUpdatingIndex);

    // at last, go back to directory and write offset/sizes
    uint index_offset = current_offset;
    using (var index = new BinaryWriter (output, encoding, true))
    {
        // Index records follow the last (aligned) file body.
        foreach (var entry in output_list)
        {
            index.Write (entry.IndexName);
            index.Write (entry.IndexExt);
            index.Write ((uint)entry.Offset);
            index.Write (entry.UnpackedSize);
            index.Write (entry.Size);
            index.Write (entry.CompressionType);
            index.Write (entry.HasCheckSum);
            index.Write (entry.CheckSum);
            index.Write (entry.FileTime);
        }
        // Back-fill the archive header.
        index.BaseStream.Position = 0;
        index.Write (Signature);
        index.Write (0x03006b63);
        index.Write (index_offset);
        index.Write (output_list.Count);
        if (null != callback)
            callback (callback_count++, null, arcStrings.MsgCalculatingChecksum);
        // Re-read the whole archive through a CRC16 stream and append the
        // 2-byte checksum at the end.
        output.Position = 0;
        using (var checked_stream = new CheckedStream (output, new Crc16()))
        {
            checked_stream.CopyTo (Stream.Null);
            index.Write ((ushort)checked_stream.CheckSumValue);
        }
    }
}
/// <summary>
/// Component resource that provisions a globally distributed application:
/// a multi-region Cosmos DB account (SQL database + container), a
/// Performance-routed Traffic Manager profile, and one regional endpoint
/// per location produced by the caller-supplied factory.
/// </summary>
public CosmosApp(string name, CosmosAppArgs args, ComponentResourceOptions?options = null)
    : base("examples:azure:CosmosApp", name, options)
{
    var resourceGroup = args.ResourceGroup;
    var locations = args.Locations;
    // The first location acts as the primary write region.
    var primaryLocation = locations[0];
    // Child resources are parented to this component.
    var parentOptions = (CustomResourceOptions)ResourceOptions.Merge(new CustomResourceOptions { Parent = this }, options);

    // Cosmos DB Account with multiple replicas
    var cosmosAccount = new Account($"cosmos-{name}", new AccountArgs
    {
        ResourceGroupName = resourceGroup.Name,
        Location = primaryLocation,
        // Failover priority follows the order of the supplied locations.
        GeoLocations = locations.Select((l, i) => new AccountGeoLocationsArgs { Location = l, FailoverPriority = i }).ToArray(),
        OfferType = "Standard",
        ConsistencyPolicy = new AccountConsistencyPolicyArgs { ConsistencyLevel = "Session" },
        EnableMultipleWriteLocations = args.EnableMultiMaster,
    }, parentOptions);

    var database = new SqlDatabase($"db-{name}", new SqlDatabaseArgs
    {
        ResourceGroupName = resourceGroup.Name,
        AccountName = cosmosAccount.Name,
        Name = args.DatabaseName,
    }, parentOptions);

    var container = new SqlContainer($"sql-{name}", new SqlContainerArgs
    {
        ResourceGroupName = resourceGroup.Name,
        AccountName = cosmosAccount.Name,
        DatabaseName = database.Name,
        Name = args.ContainerName,
    }, parentOptions);

    // Traffic Manager as a global HTTP endpoint
    var profile = new TrafficManagerProfile($"tm{name}", new TrafficManagerProfileArgs
    {
        ResourceGroupName = resourceGroup.Name,
        TrafficRoutingMethod = "Performance",
        DnsConfigs =
        {
            new TrafficManagerProfileDnsConfigsArgs
            {
                // Subdomain must be globally unique, so we default it with the full resource group name
                RelativeName = Output.Format($"{name}{resourceGroup.Name}"),
                Ttl = 60,
            }
        },
        MonitorConfigs =
        {
            new TrafficManagerProfileMonitorConfigsArgs
            {
                Protocol = "HTTP",
                Port = 80,
                Path = "/api/ping",
            }
        },
    }, parentOptions);

    // Hand the shared resources to the caller's factory, which returns a
    // builder for the per-region application.
    var globalContext = new GlobalContext(resourceGroup, cosmosAccount, database, container, parentOptions);
    var buildLocation = args.Factory(globalContext);

    // Endpoints are parented to the profile and replaced delete-first so
    // the endpoint name stays stable.
    var endpointOptions = (CustomResourceOptions)ResourceOptions.Merge(options, new CustomResourceOptions { Parent = profile, DeleteBeforeReplace = true });
    var endpoints = locations.Select(location =>
    {
        var app = buildLocation(new RegionalContext(location));
        return(new TrafficManagerEndpoint($"tm{name}{location}".Truncate(16), new TrafficManagerEndpointArgs
        {
            ResourceGroupName = resourceGroup.Name,
            ProfileName = profile.Name,
            Type = app.Type,
            TargetResourceId = app.Id,
            Target = app.Url,
            EndpointLocation = location,
        }, endpointOptions));
    }).ToList();

    this.Endpoint = Output.Format($"http://{profile.Fqdn}");
    this.RegisterOutputs();
}
// Writes an archive whose index precedes the data: a 4-byte total index
// size, then variable-length records of [entry_size:4][12 bytes][name+NUL],
// with the 12-byte gap partially back-filled with offset/size afterwards.
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    using (var writer = new BinaryWriter(output, Encoding.ASCII, true))
    {
        var encoding = Encodings.cp932.WithFatalFallback();
        int callback_count = 0;
        if (null != callback) { callback(callback_count++, null, arcStrings.MsgWritingIndex); }
        // Placeholder for the total index size, patched at the end.
        writer.Write(0);
        byte[] name_buf = new byte[256];
        uint index_size = 0;
        var entry_sizes = new List <int>();
        // first, write names only
        foreach (var entry in list)
        {
            try
            {
                int size = encoding.GetBytes(entry.Name, 0, entry.Name.Length, name_buf, 0);
                // A name that exactly fills the buffer leaves no room for the NUL.
                if (name_buf.Length == size) { throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong); }
                name_buf[size] = 0;
                // Record layout: 4 (entry_size) + 12 (skipped) + name + NUL = size + 17.
                int entry_size = size + 17;
                writer.Write(entry_size);
                writer.BaseStream.Seek(12, SeekOrigin.Current);
                writer.Write(name_buf, 0, size + 1);
                entry_sizes.Add(entry_size);
                index_size += (uint)entry_size;
            }
            catch (EncoderFallbackException X) { throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X); }
            catch (ArgumentException X) { throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong, X); }
        }
        // now, write files and remember offset/sizes
        long current_offset = writer.BaseStream.Position;
        foreach (var entry in list)
        {
            if (null != callback) { callback(callback_count++, entry, arcStrings.MsgAddingFile); }
            entry.Offset = current_offset;
            using (var input = File.OpenRead(entry.Name))
            {
                var file_size = input.Length;
                if (file_size > uint.MaxValue || current_offset + file_size > uint.MaxValue) { throw new FileSizeException(); }
                current_offset += file_size;
                entry.Size = (uint)file_size;
                input.CopyTo(output);
            }
        }
        if (null != callback) { callback(callback_count++, null, arcStrings.MsgUpdatingIndex); }
        // at last, go back to directory and write offset/sizes
        writer.BaseStream.Position = 0;
        writer.Write(index_size);
        // NOTE(review): index_offset starts at 12 = record start (4) + 8, so
        // offset/size land 8 bytes into each record's 12-byte gap — verify
        // this field placement against the corresponding reader.
        long index_offset = 4 + 8;
        int i = 0;
        foreach (var entry in list)
        {
            writer.BaseStream.Position = index_offset;
            int entry_size = entry_sizes[i++];
            index_offset += entry_size;
            writer.Write((uint)entry.Offset);
            writer.Write(entry.Size);
        }
    }
}
/// <summary>
/// Enumerates the file names of embedded assets whose flags include every
/// flag set in <paramref name="options"/>.
/// </summary>
public static IEnumerable<string> GetResources(ResourceOptions options = ResourceOptions.Release | ResourceOptions.Odo)
{
    // write all of the embedded resources out
    return from asset in RequiredAssets
           where asset.Options.HasFlag(options)
           select asset.FileName;
}
// Writes an AMI archive, optionally merging unmodified entries from an
// existing base archive with the freshly supplied files.
public override void Create (Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    ArcFile base_archive = null;
    var ami_options = GetOptions<AmiOptions> (options);
    // Open the base archive when the options request one.
    if (null != ami_options && ami_options.UseBaseArchive && !string.IsNullOrEmpty (ami_options.BaseArchive))
    {
        var base_file = new ArcView (ami_options.BaseArchive);
        try
        {
            if (base_file.View.ReadUInt32(0) == Signature)
                base_archive = TryOpen (base_file);
            if (null == base_archive)
                throw new InvalidFormatException (string.Format ("{0}: base archive could not be read", Path.GetFileName (ami_options.BaseArchive)));
            // Ownership transferred to base_archive; don't dispose below.
            base_file = null;
        }
        finally
        {
            if (null != base_file)
                base_file.Dispose();
        }
    }
    try
    {
        // Entries keyed (and therefore ordered) by their numeric id; new
        // files override base-archive entries with the same id.
        var file_table = new SortedDictionary<uint, PackedEntry>();
        if (null != base_archive)
        {
            foreach (AmiEntry entry in base_archive.Dir)
                file_table[entry.Id] = entry;
        }
        int update_count = UpdateFileTable (file_table, list);
        if (0 == update_count)
            throw new InvalidFormatException (arcStrings.AMINoFiles);
        uint file_count = (uint)file_table.Count;
        if (null != callback)
            callback ((int)file_count+1, null, null);
        int callback_count = 0;
        long start_offset = output.Position;
        // Reserve room for the 16-byte header plus one 16-byte index record per file.
        uint data_offset = file_count * 16 + 16;
        output.Seek (data_offset, SeekOrigin.Current);
        foreach (var entry in file_table)
        {
            if (null != callback)
                callback (callback_count++, entry.Value, arcStrings.MsgAddingFile);
            long current_offset = output.Position;
            if (current_offset > uint.MaxValue)
                throw new FileSizeException();
            // AmiEntry values come from the base archive and are copied
            // verbatim; everything else is written from disk.
            if (entry.Value is AmiEntry)
                CopyAmiEntry (base_archive, entry.Value, output);
            else
                entry.Value.Size = WriteAmiEntry (entry.Value, output);
            entry.Value.Offset = (uint)current_offset;
        }
        if (null != callback)
            callback (callback_count++, null, arcStrings.MsgWritingIndex);
        // Go back and write the header and index now that offsets are known.
        output.Position = start_offset;
        using (var header = new BinaryWriter (output, Encoding.ASCII, true))
        {
            header.Write (Signature);
            header.Write (file_count);
            header.Write (data_offset);
            header.Write ((uint)0);
            foreach (var entry in file_table)
            {
                header.Write (entry.Key);
                header.Write ((uint)entry.Value.Offset);
                header.Write ((uint)entry.Value.UnpackedSize);
                header.Write ((uint)entry.Value.Size);
            }
        }
    }
    finally
    {
        if (null != base_archive)
            base_archive.Dispose();
    }
}
// Writes an RPA-3.0 (Ren'Py) archive: file bodies first, then a
// zlib-compressed pickled index, with the plain-text signature line
// (holding the index position and XOR key) back-filled at offset 0.
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var rpa_options = GetOptions <RpaOptions> (options);
    int callback_count = 0;
    var file_table = new Dictionary <PyString, ArrayList>();
    // Reserve room for the signature line at the front.
    long data_offset = 0x22;
    output.Position = data_offset;
    foreach (var entry in list)
    {
        if (null != callback) { callback(callback_count++, entry, arcStrings.MsgAddingFile); }
        // Index names use forward slashes.
        string name = entry.Name.Replace(@"\", "/");
        var rpa_entry = new RpaEntry { Name = name };
        using (var file = File.OpenRead(entry.Name))
        {
            var size = file.Length;
            if (size > uint.MaxValue) { throw new FileSizeException(); }
            // Up to the first 0x10 bytes of each file are stored in the
            // index record itself; only the remainder goes into the body.
            int header_size = (int)Math.Min(size, 0x10);
            // Offsets and sizes are XOR-obfuscated with the archive key.
            rpa_entry.Offset = output.Position ^ rpa_options.Key;
            rpa_entry.Header = new byte[header_size];
            rpa_entry.UnpackedSize = (uint)size ^ rpa_options.Key;
            rpa_entry.Size = (uint)(size - header_size);
            file.Read(rpa_entry.Header, 0, header_size);
            // CopyTo continues from the current position, i.e. past the prefix.
            file.CopyTo(output);
        }
        // Multiple entries may share one name; each name maps to a list.
        var py_name = new PyString(name);
        if (file_table.ContainsKey(py_name)) { file_table[py_name].Add(rpa_entry); }
        else { file_table[py_name] = new ArrayList { rpa_entry } };
    }
    long index_pos = output.Position;
    string signature = string.Format(CultureInfo.InvariantCulture, "RPA-3.0 {0:x16} {1:x8}\n", index_pos, rpa_options.Key);
    var header = Encoding.ASCII.GetBytes(signature);
    // The signature must fit in the space reserved at the front.
    if (header.Length > data_offset) { throw new ApplicationException("Signature serialization failed."); }
    if (null != callback) { callback(callback_count++, null, arcStrings.MsgWritingIndex); }
    // Index is a zlib-compressed Python pickle of the name -> entries map.
    using (var index = new ZLibStream(output, CompressionMode.Compress, CompressionLevel.Level9, true))
    {
        var pickle = new Pickle(index);
        if (!pickle.Dump(file_table)) { throw new ApplicationException("Archive index serialization failed."); }
    }
    output.Position = 0;
    output.Write(header, 0, header.Length);
}
/// <summary>
/// Creates an <c>azure:core/resourceGroup:ResourceGroup</c> resource with
/// the given name, arguments and options; all work happens in the base
/// resource constructor.
/// </summary>
public ResourceGroup(string name, ResourceGroupArgs args = default, ResourceOptions opts = default)
    : base("azure:core/resourceGroup:ResourceGroup", name, args, opts)
{
}
// Writes a YPF archive: 0x20-byte header, name-hash-sorted index with
// XOR-encrypted names, then zlib-packed (or stored) file bodies, each
// Adler32-checksummed.
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var ypf_options = GetOptions<YpfOptions> (options);
    if (null == ypf_options)
        throw new ArgumentException ("Invalid archive creation options", "options");
    // The name-obfuscation key must fit in a single byte.
    if (ypf_options.Key > 0xff)
        throw new InvalidEncryptionScheme (arcStrings.MsgCreationKeyRequired);
    if (0 == ypf_options.Version)
        throw new InvalidFormatException (arcStrings.MsgInvalidVersion);
    var scheme = new YpfScheme
    {
        SwapTable = GuessSwapTable (ypf_options.Version),
        Key = (byte)ypf_options.Key
    };
    int callback_count = 0;
    var encoding = Encodings.cp932.WithFatalFallback();
    ChecksumFunc Checksum = data => Crc32.Compute (data, 0, data.Length);
    // Header is 0x20 bytes; each index record is 0x17 bytes + the name.
    uint data_offset = 0x20;
    var file_table = new List<YpfEntry>();
    foreach (var entry in list)
    {
        try
        {
            string file_name = entry.Name;
            byte[] name_buf = encoding.GetBytes (file_name);
            // Name length is stored in one byte.
            if (name_buf.Length > 0xff)
                throw new InvalidFileName (entry.Name, arcStrings.MsgFileNameTooLong);
            // CRC32 of the raw (unencrypted) name; also the index sort key.
            uint hash = Checksum (name_buf);
            byte file_type = GetFileType (ypf_options.Version, file_name);
            // XOR-obfuscate the stored name with the archive key.
            for (int i = 0; i < name_buf.Length; ++i)
                name_buf[i] = (byte)(name_buf[i] ^ ypf_options.Key);
            file_table.Add (new YpfEntry {
                Name = file_name,
                IndexName = name_buf,
                NameHash = hash,
                // Only type-0 entries get zlib compression.
                IsPacked = 0 == file_type,
                FileType = file_type,
            });
            data_offset += (uint)(0x17 + name_buf.Length);
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
    }
    // Index records are sorted by name hash.
    file_table.Sort ((a, b) => a.NameHash.CompareTo (b.NameHash));
    output.Position = data_offset;
    uint current_offset = data_offset;
    foreach (var entry in file_table)
    {
        if (null != callback)
            callback (callback_count++, entry, arcStrings.MsgAddingFile);
        entry.Offset = current_offset;
        using (var input = File.OpenRead (entry.Name))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue || current_offset + file_size > uint.MaxValue)
                throw new FileSizeException();
            entry.UnpackedSize = (uint)file_size;
            // Checksum the bytes as they are written out (compressed or not).
            using (var checked_stream = new CheckedStream (output, new Adler32()))
            {
                if (entry.IsPacked)
                {
                    var start = output.Position;
                    using (var zstream = new ZLibStream (checked_stream, CompressionMode.Compress, CompressionLevel.Level9, true))
                    {
                        input.CopyTo (zstream);
                    }
                    entry.Size = (uint)(output.Position - start);
                }
                else
                {
                    input.CopyTo (checked_stream);
                    entry.Size = entry.UnpackedSize;
                }
                checked_stream.Flush();
                entry.CheckSum = checked_stream.CheckSumValue;
                current_offset += entry.Size;
            }
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgWritingIndex);
    // Back-fill the header, then write the index starting at 0x20.
    output.Position = 0;
    using (var writer = new BinaryWriter (output, encoding, true))
    {
        writer.Write (Signature);
        writer.Write (ypf_options.Version);
        writer.Write (file_table.Count);
        writer.Write (data_offset);
        writer.BaseStream.Seek (0x20, SeekOrigin.Begin);
        foreach (var entry in file_table)
        {
            writer.Write (entry.NameHash);
            // Name length is stored obfuscated via the swap-table transform.
            byte name_len = (byte)~Parser.DecryptLength (scheme.SwapTable, (byte)entry.IndexName.Length);
            writer.Write (name_len);
            writer.Write (entry.IndexName);
            writer.Write (entry.FileType);
            writer.Write (entry.IsPacked);
            writer.Write (entry.UnpackedSize);
            writer.Write (entry.Size);
            writer.Write ((uint)entry.Offset);
            writer.Write (entry.CheckSum);
        }
    }
}
/// <summary>
/// Renders a newline-delimited list of &lt;script&gt; tags for every
/// embedded resource matching the given option flags, rooted at
/// <paramref name="path"/>.
/// </summary>
public static string GetHtmlIncludes(string path, ResourceOptions options = ResourceOptions.Release | ResourceOptions.Odo)
{
    var tags = GetResources(options).Select(name =>
    {
        // Normalize to forward slashes for use in a URL.
        var src = Path.Combine(path, name).Replace('\\', '/');
        return string.Format("<script type='text/javascript' src='{0}' ></script>", src);
    });
    return tags.Delimit("\n");
}
/// <summary>
/// Request a generic resource from the service server(s).
/// </summary>
/// <param name="resourceUri">
/// The API route to make the request to.
/// </param>
/// <param name="code">
/// Receives the HTTP status code of the response (0 when no connection
/// could be made, InternalServerError on unexpected local failure).
/// </param>
/// <param name="responseReceived">
/// Gets set to false if no response was received, otherwise true.
/// </param>
/// <param name="options">
/// Request options (method, content type, parameters, logging). A default
/// instance is used when null.
/// </param>
/// <param name="resource">
/// The well-known service resource being requested; influences auth
/// handling and extra parameters.
/// </param>
/// <returns>
/// A non-null byte array on success. Null byte array on failure.
/// </returns>
public byte[] RequestResource(string resourceUri, out HttpStatusCode code, out bool responseReceived, ResourceOptions options = null, ServiceResource resource = ServiceResource.Custom)
{
    if (options == null)
    {
        options = new ResourceOptions(); // Instantiate a resource options object with default options.
    }
    responseReceived = true;
    Dictionary <string, object> parameters = new Dictionary <string, object>();
    try
    {
        // Try to send the device name as well. Helps distinguish between clients under the
        // same account.
        string deviceName = string.Empty;
        try
        {
            deviceName = Environment.MachineName;
        }
        catch
        {
            deviceName = "Unknown";
        }
        var accessToken = WebServiceUtil.Default.AuthToken;
        //m_logger.Info("RequestResource1: accessToken=" + accessToken);
        IVersionProvider versionProvider = PlatformTypes.New <IVersionProvider>();
        string version = versionProvider.GetApplicationVersion().ToString(3);
        // Build out post data with username and identifier.
        parameters.Add("identifier", FingerprintService.Default.Value);
        parameters.Add("device_id", deviceName);
        string postString = null;
        //string postString = string.Format("&identifier={0}&device_id={1}", FingerprintService.Default.Value, Uri.EscapeDataString(deviceName));
        // Merge caller-supplied parameters into the common set.
        if (options.Parameters != null)
        {
            foreach (var parameter in options.Parameters)
            {
                parameters.Add(parameter.Key, parameter.Value);
            }
        }
        // Sum-check requests additionally report the app version.
        if (resource == ServiceResource.UserDataSumCheck || resource == ServiceResource.UserConfigSumCheck)
        {
            m_logger.Info("Sending version {0} to server", version);
            parameters.Add("app_version", version);
        }
        // Serialize the parameter bag according to the requested content type.
        switch (options.ContentType)
        {
            case "application/x-www-form-urlencoded":
                postString = string.Join("&", parameters.Select(kv => $"{kv.Key}={kv.Value}"));
                break;
            case "application/json":
                postString = Newtonsoft.Json.JsonConvert.SerializeObject(parameters);
                break;
        }
        // GET/DELETE carry the parameters in the query string instead of a body.
        if (options.Method == "GET" || options.Method == "DELETE")
        {
            resourceUri += "?" + postString;
            if (postString.Contains("app_version"))
            {
                m_logger.Info("Sending postString as {0}", resourceUri);
            }
        }
        var request = GetApiBaseRequest(resourceUri, options);
        m_logger.Debug("WebServiceUtil.Request {0}", request.RequestUri);
        // Attach the bearer token; without one, only token retrieval is allowed.
        if (StringExtensions.Valid(accessToken))
        {
            request.Headers.Add("Authorization", string.Format("Bearer {0}", accessToken));
        }
        else if (resource != ServiceResource.RetrieveToken)
        {
            m_logger.Info("RequestResource1: Authorization failed.");
            AuthTokenRejected?.Invoke();
            code = HttpStatusCode.Unauthorized;
            return(null);
        }
        // Body-carrying methods write the serialized parameters to the request stream.
        if (options.Method != "GET" && options.Method != "DELETE")
        {
            if (postString.Contains("app_version"))
            {
                m_logger.Info("Sending {0} to server as {1}", postString, options.Method);
            }
            var formData = System.Text.Encoding.UTF8.GetBytes(postString);
            request.ContentLength = formData.Length;
            using (var requestStream = request.GetRequestStream())
            {
                requestStream.Write(formData, 0, formData.Length);
                requestStream.Close();
            }
        }
        m_logger.Info("RequestResource: uri={0}", request.RequestUri);
        // Now that our login form data has been POST'ed, get a response.
        using (var response = (HttpWebResponse)request.GetResponse())
        {
            // Get the response code as an int so we can range check it.
            var intCode = (int)response.StatusCode;
            code = (HttpStatusCode)intCode;
            try
            {
                // Check if response code is considered a success code.
                if (intCode >= 200 && intCode <= 299)
                {
                    using (var memoryStream = new MemoryStream())
                    {
                        response.GetResponseStream().CopyTo(memoryStream);
                        // We do this just in case we get something like a 204. The idea here
                        // is that if we return a non-null, the call was a success.
                        var responseBody = memoryStream.ToArray();
                        if (responseBody == null || intCode == 204)
                        {
                            return(null);
                        }
                        return(responseBody);
                    }
                }
                else
                {
                    m_logger.Info("When requesting resource, got unexpected response code of {0}.", code);
                }
            }
            finally
            {
                response.Close();
                request.Abort();
            }
        }
    }
    catch (WebException e)
    {
        // KF - Set this to 0 for default. 0's a pretty good indicator of no internet.
        code = 0;
        try
        {
            using (WebResponse response = e.Response)
            {
                if (response == null)
                {
                    responseReceived = false;
                }
                // NOTE(review): when response is null the cast below yields null
                // and .StatusCode throws NullReferenceException, swallowed by the
                // outer catch — the error-body logging is skipped in that case.
                HttpWebResponse httpResponse = (HttpWebResponse)response;
                m_logger.Error("Error code: {0}", httpResponse.StatusCode);
                int intCode = (int)httpResponse.StatusCode;
                code = (HttpStatusCode)intCode;
                // Auth failure means re-log EXCEPT when requesting deactivation.
                if ((intCode == 401 || intCode == 403) && resource != ServiceResource.DeactivationRequest)
                {
                    WebServiceUtil.Default.AuthToken = string.Empty;
                    m_logger.Info("RequestResource2: Authorization failed.");
                    AuthTokenRejected?.Invoke();
                }
                else if (intCode > 399 && intCode <= 499 && resource != ServiceResource.DeactivationRequest)
                {
                    m_logger.Info("Error occurred in RequestResource: {0}", intCode);
                }
                // Log whatever error body the server returned.
                using (Stream data = response.GetResponseStream())
                using (var reader = new StreamReader(data))
                {
                    string text = reader.ReadToEnd();
                    m_logger.Error(text);
                }
            }
        }
        catch { }
        if (!options.NoLogging)
        {
            m_logger.Error(e.Message);
            m_logger.Error(e.StackTrace);
        }
    }
    catch (Exception e)
    {
        // XXX TODO - Good default?
        code = HttpStatusCode.InternalServerError;
        if (!options.NoLogging)
        {
            // Walk and log the full inner-exception chain.
            while (e != null)
            {
                m_logger.Error(e.Message);
                m_logger.Error(e.StackTrace);
                e = e.InnerException;
            }
        }
    }
    return(null);
}
/// <summary>
/// Prompts the user for archive-creation parameters and, when confirmed,
/// starts the background creation worker.
/// </summary>
/// <returns>
/// true when a background creation was started; false when the user
/// cancelled the dialog or the input was invalid.
/// </returns>
public bool Run()
{
    Directory.SetCurrentDirectory (m_main.CurrentPath);
    // Materialize the selection once: the original deferred query would
    // re-enumerate the live SelectedItems collection on every LINQ call.
    var items = m_main.CurrentDirectory.SelectedItems.Cast<EntryViewModel>().ToList();
    if (string.IsNullOrEmpty (m_arc_name))
    {
        // Default archive name comes from the current directory, or from
        // the single selected directory when exactly one is selected.
        m_arc_name = Path.GetFileName (m_main.CurrentPath);
        if (1 == items.Count)
        {
            var item = items[0];
            if (item.IsDirectory)
                m_arc_name = Path.GetFileNameWithoutExtension (item.Name);
        }
    }
    var dialog = new CreateArchiveDialog (m_arc_name);
    dialog.Owner = m_main;
    // ShowDialog() returns bool?; treat null (window closed without an
    // explicit result) as cancel instead of throwing on .Value.
    if (dialog.ShowDialog() != true)
    {
        return false;
    }
    if (string.IsNullOrEmpty (dialog.ArchiveName.Text))
    {
        m_main.SetStatusText ("Archive name is empty");
        return false;
    }
    m_format = dialog.ArchiveFormat.SelectedItem as ArchiveFormat;
    if (null == m_format)
    {
        m_main.SetStatusText ("Format is not selected");
        return false;
    }
    m_options = dialog.ArchiveOptions;
    // Hierarchic formats keep directory structure; flat formats take the
    // files of each directory without nesting.
    if (m_format.IsHierarchic)
        m_file_list = BuildFileList (items, AddFilesRecursive);
    else
        m_file_list = BuildFileList (items, AddFilesFromDir);
    m_arc_name = Path.GetFullPath (dialog.ArchiveName.Text);
    m_progress_dialog = new ProgressDialog ()
    {
        WindowTitle = guiStrings.TextTitle,
        Text        = string.Format (guiStrings.MsgCreatingArchive, Path.GetFileName (m_arc_name)),
        Description = "",
        MinimizeBox = true,
    };
    m_progress_dialog.DoWork += CreateWorker;
    m_progress_dialog.RunWorkerCompleted += OnCreateComplete;
    m_progress_dialog.ShowDialog (m_main);
    return true;
}
/// <summary>
/// Write archive to <paramref name="output"/>: signature, directory size and
/// file count, then a directory of 40-byte records (32-byte cp932 name plus
/// an 8-byte offset/size slot), then raw file bodies.  Offsets and sizes are
/// back-patched into the directory afterwards.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    // leaveOpen: true -- the caller owns the output stream.
    using (var writer = new BinaryWriter (output, Encoding.ASCII, true))
    {
        writer.Write (Signature);
        int list_size = list.Count();
        uint dir_size = (uint)(list_size * 40); // 40 bytes per directory record
        writer.Write (dir_size);
        writer.Write (list_size);
        var encoding = Encodings.cp932.WithFatalFallback();
        byte[] name_buf = new byte[32]; // fixed-width name field
        int callback_count = 0;
        if (null != callback)
            callback (callback_count++, null, arcStrings.MsgWritingIndex);
        // first, write names only
        foreach (var entry in list)
        {
            string name = Path.GetFileName (entry.Name);
            try
            {
                int size = encoding.GetBytes (name, 0, name.Length, name_buf, 0);
                // NUL-terminate short names.  Only the byte at 'size' is
                // cleared, so bytes beyond it may hold residue from a longer
                // previous name -- presumably readers stop at the first NUL;
                // TODO confirm against the reader implementation.
                if (size < name_buf.Length)
                    name_buf[size] = 0;
            }
            catch (EncoderFallbackException X)
            {
                // name contains characters unrepresentable in cp932
                throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
            }
            catch (ArgumentException X)
            {
                // GetBytes throws when the encoded name overflows name_buf
                throw new InvalidFileName (entry.Name, arcStrings.MsgFileNameTooLong, X);
            }
            writer.Write (name_buf);
            // leave the 8-byte offset/size slot empty; filled in below
            writer.BaseStream.Seek (8, SeekOrigin.Current);
        }
        // now, write files and remember offset/sizes
        uint current_offset = 0; // offsets are relative to the data area
        foreach (var entry in list)
        {
            if (null != callback)
                callback (callback_count++, entry, arcStrings.MsgAddingFile);
            entry.Offset = current_offset;
            using (var input = File.Open (entry.Name, FileMode.Open, FileAccess.Read))
            {
                var size = input.Length;
                // 32-bit offsets/sizes: reject anything that would overflow
                if (size > uint.MaxValue || current_offset + size > uint.MaxValue)
                    throw new FileSizeException();
                current_offset += (uint)size;
                entry.Size = (uint)size;
                input.CopyTo (output);
            }
        }
        if (null != callback)
            callback (callback_count++, null, arcStrings.MsgUpdatingIndex);
        // at last, go back to directory and write offset/sizes
        long dir_offset = 12+32; // 12-byte header + first record's 32-byte name
        foreach (var entry in list)
        {
            writer.BaseStream.Position = dir_offset;
            writer.Write ((uint)entry.Offset);
            writer.Write (entry.Size);
            dir_offset += 40; // next record's offset/size slot
        }
    }
}
/// <summary>
/// Write an AMI archive.  Optionally merges entries from an existing base
/// archive (when the supplied options request it), overlaying them with the
/// new entries, then writes file bodies followed by a back-patched header
/// and a 16-byte-per-file index.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    ArcFile base_archive = null;
    var ami_options = GetOptions <AmiOptions> (options);
    if (null != ami_options && ami_options.UseBaseArchive && !string.IsNullOrEmpty(ami_options.BaseArchive))
    {
        var base_file = new ArcView(ami_options.BaseArchive);
        try
        {
            if (base_file.View.ReadUInt32(0) == Signature)
            {
                base_archive = TryOpen(base_file);
            }
            if (null == base_archive)
            {
                throw new InvalidFormatException(string.Format("{0}: base archive could not be read", Path.GetFileName(ami_options.BaseArchive)));
            }
            // ownership of base_file passed to base_archive; prevent the
            // finally block below from disposing it
            base_file = null;
        }
        finally
        {
            if (null != base_file)
            {
                base_file.Dispose();
            }
        }
    }
    try
    {
        // keyed by entry id so base-archive entries are overridden by new
        // entries carrying the same id
        var file_table = new SortedDictionary <uint, PackedEntry>();
        if (null != base_archive)
        {
            foreach (AmiEntry entry in base_archive.Dir)
            {
                file_table[entry.Id] = entry;
            }
        }
        int update_count = UpdateFileTable(file_table, list);
        if (0 == update_count)
        {
            // nothing from 'list' made it into the table
            throw new InvalidFormatException(arcStrings.AMINoFiles);
        }
        uint file_count = (uint)file_table.Count;
        if (null != callback)
        {
            // announce total step count (files + index write)
            callback((int)file_count + 1, null, null);
        }
        int callback_count = 0;
        long start_offset = output.Position;
        // 16-byte header + 16 bytes of index per file precede the data
        uint data_offset = file_count * 16 + 16;
        output.Seek(data_offset, SeekOrigin.Current);
        foreach (var entry in file_table)
        {
            if (null != callback)
            {
                callback(callback_count++, entry.Value, arcStrings.MsgAddingFile);
            }
            long current_offset = output.Position;
            if (current_offset > uint.MaxValue)
            {
                // offsets are stored as 32-bit values
                throw new FileSizeException();
            }
            // entries that came from the base archive are copied verbatim;
            // new entries are written from disk
            if (entry.Value is AmiEntry)
            {
                CopyAmiEntry(base_archive, entry.Value, output);
            }
            else
            {
                entry.Value.Size = WriteAmiEntry(entry.Value, output);
            }
            entry.Value.Offset = (uint)current_offset;
        }
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgWritingIndex);
        }
        // rewind and write header + index now that offsets/sizes are known
        output.Position = start_offset;
        using (var header = new BinaryWriter(output, Encoding.ASCII, true))
        {
            header.Write(Signature);
            header.Write(file_count);
            header.Write(data_offset);
            header.Write((uint)0);
            // 16 bytes per entry: id, offset, unpacked size, packed size
            foreach (var entry in file_table)
            {
                header.Write(entry.Key);
                header.Write((uint)entry.Value.Offset);
                header.Write((uint)entry.Value.UnpackedSize);
                header.Write((uint)entry.Value.Size);
            }
        }
    }
    finally
    {
        if (null != base_archive)
        {
            base_archive.Dispose();
        }
    }
}
/// <summary>
/// Write an ARC archive where files are grouped by extension: a count plus a
/// 12-byte record per extension group, then per-group directories of
/// fixed-width names with size/offset, then file bodies.  The index is
/// written last, once offsets are known.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var arc_options = GetOptions<ArcOptions> (options);
    var encoding = Encodings.cp932.WithFatalFallback();
    int file_count = 0;
    // extension (uppercased) -> directory of entries with that extension
    var file_table = new SortedDictionary<string, ArcDirectory>();
    foreach (var entry in list)
    {
        string ext = Path.GetExtension (entry.Name).TrimStart ('.').ToUpperInvariant();
        if (string.IsNullOrEmpty (ext))
            throw new InvalidFileName (entry.Name, arcStrings.MsgNoExtension);
        if (ext.Length > 3)
            throw new InvalidFileName (entry.Name, arcStrings.MsgExtensionTooLong);
        string name = Path.GetFileNameWithoutExtension (entry.Name).ToUpperInvariant();
        byte[] raw_name = encoding.GetBytes (name);
        // names are stored in a fixed-width field of NameLength bytes
        if (raw_name.Length > arc_options.NameLength)
            throw new InvalidFileName (entry.Name, arcStrings.MsgFileNameTooLong);
        ArcDirectory dir;
        if (!file_table.TryGetValue (ext, out dir))
        {
            byte[] raw_ext = encoding.GetBytes (ext);
            // cp932 encoding may expand the extension beyond 3 bytes
            if (raw_ext.Length > 3)
                throw new InvalidFileName (entry.Name, arcStrings.MsgExtensionTooLong);
            dir = new ArcDirectory { Extension = raw_ext, Files = new List<ArcEntry>() };
            file_table[ext] = dir;
        }
        dir.Files.Add (new ArcEntry { Name = entry.Name, RawName = raw_name });
        ++file_count;
    }
    if (null != callback)
        callback (file_count+1, null, null); // total step count (files + index)
    int callback_count = 0;
    // 4-byte count + 12 bytes per extension group, then the per-file
    // directory records (NameLength + 9 bytes each), then data
    long dir_offset = 4 + file_table.Count * 12;
    long data_offset = dir_offset + (arc_options.NameLength + 9) * file_count;
    output.Position = data_offset;
    foreach (var ext in file_table.Keys)
    {
        var dir = file_table[ext];
        dir.DirOffset = (uint)dir_offset;
        dir_offset += (arc_options.NameLength + 9) * dir.Files.Count;
        foreach (var entry in dir.Files)
        {
            if (null != callback)
                callback (callback_count++, entry, arcStrings.MsgAddingFile);
            entry.Offset = data_offset;
            entry.Size = WriteEntry (entry.Name, output);
            data_offset += entry.Size;
            // offsets are stored as 32-bit values
            if (data_offset > uint.MaxValue)
                throw new FileSizeException();
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgWritingIndex);
    // rewind and write the index now that offsets are known
    output.Position = 0;
    using (var header = new BinaryWriter (output, encoding, true))
    {
        byte[] buffer = new byte[arc_options.NameLength+1];
        header.Write (file_table.Count);
        foreach (var ext in file_table)
        {
            // extension padded with NULs to 4 bytes
            Buffer.BlockCopy (ext.Value.Extension, 0, buffer, 0, ext.Value.Extension.Length);
            for (int i = ext.Value.Extension.Length; i < 4; ++i)
                buffer[i] = 0;
            header.Write (buffer, 0, 4);
            header.Write (ext.Value.Files.Count);
            header.Write (ext.Value.DirOffset);
        }
        foreach (var ext in file_table)
        {
            foreach (var entry in ext.Value.Files)
            {
                // name padded with NULs to NameLength+1 bytes
                Buffer.BlockCopy (entry.RawName, 0, buffer, 0, entry.RawName.Length);
                for (int i = entry.RawName.Length; i < buffer.Length; ++i)
                    buffer[i] = 0;
                header.Write (buffer);
                header.Write (entry.Size);
                header.Write ((uint)entry.Offset);
            }
        }
    }
}
/// <summary>
/// Write an ARC archive grouped by extension: a group count plus a 12-byte
/// record per extension, then per-group directories of fixed-width names
/// with size/offset, then file bodies.  The index is written last, once
/// offsets are known.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var arc_options = GetOptions <ArcOptions> (options);
    var encoding = Encodings.cp932.WithFatalFallback();
    int file_count = 0;
    // extension (uppercased) -> directory of entries with that extension
    var file_table = new SortedDictionary <string, ArcDirectory>();
    foreach (var entry in list)
    {
        string ext = Path.GetExtension(entry.Name).TrimStart('.').ToUpperInvariant();
        if (string.IsNullOrEmpty(ext))
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgNoExtension);
        }
        if (ext.Length > 3)
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgExtensionTooLong);
        }
        string name = Path.GetFileNameWithoutExtension(entry.Name).ToUpperInvariant();
        byte[] raw_name = encoding.GetBytes(name);
        // names are stored in a fixed-width field of NameLength bytes
        if (raw_name.Length > arc_options.NameLength)
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong);
        }
        ArcDirectory dir;
        if (!file_table.TryGetValue(ext, out dir))
        {
            byte[] raw_ext = encoding.GetBytes(ext);
            // cp932 encoding may expand the extension beyond 3 bytes
            if (raw_ext.Length > 3)
            {
                throw new InvalidFileName(entry.Name, arcStrings.MsgExtensionTooLong);
            }
            dir = new ArcDirectory { Extension = raw_ext, Files = new List <ArcEntry>() };
            file_table[ext] = dir;
        }
        dir.Files.Add(new ArcEntry { Name = entry.Name, RawName = raw_name });
        ++file_count;
    }
    if (null != callback)
    {
        // announce total step count (files + index write)
        callback(file_count + 1, null, null);
    }
    int callback_count = 0;
    // 4-byte count + 12 bytes per extension group, then the per-file
    // directory records (NameLength + 9 bytes each), then data
    long dir_offset = 4 + file_table.Count * 12;
    long data_offset = dir_offset + (arc_options.NameLength + 9) * file_count;
    output.Position = data_offset;
    foreach (var ext in file_table.Keys)
    {
        var dir = file_table[ext];
        dir.DirOffset = (uint)dir_offset;
        dir_offset += (arc_options.NameLength + 9) * dir.Files.Count;
        foreach (var entry in dir.Files)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            entry.Offset = data_offset;
            entry.Size = WriteEntry(entry.Name, output);
            data_offset += entry.Size;
            // offsets are stored as 32-bit values
            if (data_offset > uint.MaxValue)
            {
                throw new FileSizeException();
            }
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    }
    // rewind and write the index now that offsets are known
    output.Position = 0;
    using (var header = new BinaryWriter(output, encoding, true))
    {
        byte[] buffer = new byte[arc_options.NameLength + 1];
        header.Write(file_table.Count);
        foreach (var ext in file_table)
        {
            // extension padded with NULs to 4 bytes
            Buffer.BlockCopy(ext.Value.Extension, 0, buffer, 0, ext.Value.Extension.Length);
            for (int i = ext.Value.Extension.Length; i < 4; ++i)
            {
                buffer[i] = 0;
            }
            header.Write(buffer, 0, 4);
            header.Write(ext.Value.Files.Count);
            header.Write(ext.Value.DirOffset);
        }
        foreach (var ext in file_table)
        {
            foreach (var entry in ext.Value.Files)
            {
                // name padded with NULs to NameLength+1 bytes
                Buffer.BlockCopy(entry.RawName, 0, buffer, 0, entry.RawName.Length);
                for (int i = entry.RawName.Length; i < buffer.Length; ++i)
                {
                    buffer[i] = 0;
                }
                header.Write(buffer);
                header.Write(entry.Size);
                header.Write((uint)entry.Offset);
            }
        }
    }
}
/// <summary>
/// Initializes the asset from a versioned name template.
/// </summary>
/// <param name="nameTemplate">
/// Format string with a single slot for the version; must not be null or
/// whitespace.
/// </param>
/// <param name="options">Options stored on the asset as-is.</param>
public Asset(string nameTemplate, ResourceOptions options)
{
    Guard.NotNullOrWhiteSpace(nameTemplate, "nameTemplate");
    // Embedded-resource name: the template with the version slot blanked
    // out and path separators turned into namespace separators.
    var resourcePath = string.Format(nameTemplate, string.Empty).Replace('/', '.');
    ResourceName = string.Format("{0}.{1}", ResourceLocation, resourcePath);
    // Physical file name: the template with the version substituted in.
    FileName = string.Format(nameTemplate, Version);
    Options = options;
}
/// <summary>
/// Write an NSA archive: big-endian entry count and data base offset,
/// per-entry index records (NUL-terminated cp932 name + 13 bytes), then
/// file bodies, optionally LZSS-compressed for .bmp files.  The index is
/// written last, after offsets/sizes are known.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var ons_options = GetOptions <NsaOptions> (options);
    var encoding = Encodings.cp932.WithFatalFallback();
    int callback_count = 0;
    var real_entry_list = new List <NsaEntry>();
    var used_names = new HashSet <string>();
    int index_size = 0;
    foreach (var entry in list)
    {
        if (!used_names.Add(entry.Name)) // duplicate name
        {
            continue;
        }
        try
        {
            // NUL-terminated name contributes its byte length + 1
            index_size += encoding.GetByteCount(entry.Name) + 1;
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        var header_entry = new NsaEntry { Name = entry.Name };
        if (Compression.None != ons_options.CompressionType)
        {
            // only bitmaps are considered for compression
            if (entry.Name.HasExtension(".bmp"))
            {
                header_entry.CompressionType = ons_options.CompressionType;
            }
        }
        // fixed part of each index record: compression flag + offset +
        // packed size + unpacked size
        index_size += 13;
        real_entry_list.Add(header_entry);
    }
    long start_offset = output.Position;
    // 6-byte header (count + base offset) precedes the index
    long base_offset = 6 + index_size;
    output.Seek(base_offset, SeekOrigin.Current);
    foreach (var entry in real_entry_list)
    {
        using (var input = File.OpenRead(entry.Name))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            // offsets are stored relative to the end of the index
            long file_offset = output.Position - base_offset;
            if (file_offset + file_size > uint.MaxValue)
            {
                throw new FileSizeException();
            }
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            entry.Offset = file_offset;
            entry.UnpackedSize = (uint)file_size;
            if (Compression.LZSS == entry.CompressionType)
            {
                var packer = new Packer(input, output);
                entry.Size = packer.EncodeLZSS();
            }
            else
            {
                // stored uncompressed; record that in the index
                entry.Size = entry.UnpackedSize;
                entry.CompressionType = Compression.None;
                input.CopyTo(output);
            }
        }
    }
    if (null != callback)
    {
        callback(callback_count++, null, arcStrings.MsgWritingIndex);
    }
    // rewind and write header + index; all multi-byte values big-endian
    output.Position = start_offset;
    using (var writer = new BinaryWriter(output, encoding, true))
    {
        writer.Write(Binary.BigEndian((short)real_entry_list.Count));
        writer.Write(Binary.BigEndian((uint)base_offset));
        foreach (var entry in real_entry_list)
        {
            writer.Write(encoding.GetBytes(entry.Name));
            writer.Write((byte)0); // name terminator
            writer.Write((byte)entry.CompressionType);
            writer.Write(Binary.BigEndian((uint)entry.Offset));
            writer.Write(Binary.BigEndian((uint)entry.Size));
            writer.Write(Binary.BigEndian((uint)entry.UnpackedSize));
        }
    }
}
/// <summary>
/// Write a Steins;Gate archive: a 4-byte plain index size, followed by an
/// encrypted index (count plus per-entry name/size/offset records) and
/// encrypted file bodies.  Bodies are written first; the index is written
/// last, once offsets are known.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var sg_options = GetOptions<SteinsGateOptions> (options);
    Encoding encoding = sg_options.FileNameEncoding.WithFatalFallback();
    long start_pos = output.Position;
    int callback_count = 0;
    uint index_size = 4; // 4 bytes for the entry count
    var real_entry_list = new List<RawEntry> (list.Count());
    var used_names = new HashSet<string>();
    foreach (var entry in list)
    {
        // archive stores forward-slash paths
        string name = entry.Name.Replace (@"\", "/");
        if (!used_names.Add (name)) // duplicate name
            continue;
        var header_entry = new RawEntry { Name = entry.Name };
        try
        {
            header_entry.IndexName = encoding.GetBytes (name);
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        // per-entry record: name-length prefix + name + size + offset
        index_size += (uint)header_entry.IndexName.Length + 16;
        real_entry_list.Add (header_entry);
    }
    // skip the 4-byte index-size field plus the index itself
    output.Seek (4+index_size, SeekOrigin.Current);
    foreach (var entry in real_entry_list)
    {
        using (var input = File.Open (entry.Name, FileMode.Open, FileAccess.Read))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue)
                throw new FileSizeException();
            entry.Offset = output.Position;
            entry.Size = (uint)file_size;
            if (null != callback)
                callback (callback_count++, entry, arcStrings.MsgAddingFile);
            // file bodies are written through the encrypting stream
            using (var stream = new SteinsGateEncryptedStream (output))
                input.CopyTo (stream);
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgWritingIndex);
    // back to the start: write the index size in plain little-endian bytes
    output.Position = start_pos;
    output.WriteByte ((byte)(index_size & 0xff));
    output.WriteByte ((byte)((index_size >> 8) & 0xff));
    output.WriteByte ((byte)((index_size >> 16) & 0xff));
    output.WriteByte ((byte)((index_size >> 24) & 0xff));
    // the index itself is encrypted; BinaryWriter disposes the stream
    var encrypted_stream = new SteinsGateEncryptedStream (output);
    using (var header = new BinaryWriter (encrypted_stream))
    {
        header.Write (real_entry_list.Count);
        foreach (var entry in real_entry_list)
        {
            header.Write (entry.IndexName.Length);
            header.Write (entry.IndexName);
            header.Write ((uint)entry.Size);
            header.Write ((long)entry.Offset);
        }
    }
}
/// <summary>
/// Create resource within stream <paramref name="file"/> containing entries from the
/// supplied <paramref name="list"/> and applying necessary <paramref name="options"/>.
/// Derived archive formats that support writing override this method; this
/// base implementation always throws.
/// </summary>
/// <param name="callback">Optional progress callback invoked by implementations.</param>
/// <exception cref="NotImplementedException">Always thrown by the base implementation.</exception>
public virtual void Create(Stream file, IEnumerable <Entry> list, ResourceOptions options = null, EntryCallback callback = null)
{
    throw new NotImplementedException("ArchiveFormat.Create is not implemented");
}
/// <summary>
/// Write an NSA archive: big-endian entry count and data base offset,
/// per-entry index records (NUL-terminated cp932 name + 13 bytes), then
/// file bodies, optionally LZSS-compressed for .bmp files.  The index is
/// written last, after offsets/sizes are known.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var ons_options = GetOptions<NsaOptions> (options);
    var encoding = Encodings.cp932.WithFatalFallback();
    int callback_count = 0;
    var real_entry_list = new List<NsaEntry>();
    var used_names = new HashSet<string>();
    int index_size = 0;
    foreach (var entry in list)
    {
        if (!used_names.Add (entry.Name)) // duplicate name
            continue;
        try
        {
            // NUL-terminated name contributes its byte length + 1
            index_size += encoding.GetByteCount (entry.Name) + 1;
        }
        catch (EncoderFallbackException X)
        {
            throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        var header_entry = new NsaEntry { Name = entry.Name };
        if (Compression.None != ons_options.CompressionType)
        {
            // only bitmaps are considered for compression
            if (entry.Name.EndsWith (".bmp", StringComparison.InvariantCultureIgnoreCase))
                header_entry.CompressionType = ons_options.CompressionType;
        }
        // fixed part of each index record: compression flag + offset +
        // packed size + unpacked size
        index_size += 13;
        real_entry_list.Add (header_entry);
    }
    long start_offset = output.Position;
    // 6-byte header (count + base offset) precedes the index
    long base_offset = 6+index_size;
    output.Seek (base_offset, SeekOrigin.Current);
    foreach (var entry in real_entry_list)
    {
        using (var input = File.OpenRead (entry.Name))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue)
                throw new FileSizeException();
            // offsets are stored relative to the end of the index
            long file_offset = output.Position - base_offset;
            if (file_offset+file_size > uint.MaxValue)
                throw new FileSizeException();
            if (null != callback)
                callback (callback_count++, entry, arcStrings.MsgAddingFile);
            entry.Offset = file_offset;
            entry.UnpackedSize = (uint)file_size;
            if (Compression.LZSS == entry.CompressionType)
            {
                var packer = new Packer (input, output);
                entry.Size = packer.EncodeLZSS();
            }
            else
            {
                // stored uncompressed; record that in the index
                entry.Size = entry.UnpackedSize;
                entry.CompressionType = Compression.None;
                input.CopyTo (output);
            }
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgWritingIndex);
    // rewind and write header + index; all multi-byte values big-endian
    output.Position = start_offset;
    using (var writer = new BinaryWriter (output, encoding, true))
    {
        writer.Write (Binary.BigEndian ((short)real_entry_list.Count));
        writer.Write (Binary.BigEndian ((uint)base_offset));
        foreach (var entry in real_entry_list)
        {
            writer.Write (encoding.GetBytes (entry.Name));
            writer.Write ((byte)0); // name terminator
            writer.Write ((byte)entry.CompressionType);
            writer.Write (Binary.BigEndian ((uint)entry.Offset));
            writer.Write (Binary.BigEndian ((uint)entry.Size));
            writer.Write (Binary.BigEndian ((uint)entry.UnpackedSize));
        }
    }
}
/// <summary>
/// Convenience overload: resolves the route string registered for
/// <paramref name="resource"/> and forwards to the route-based overload.
/// </summary>
/// <param name="code">Receives the HTTP status code of the response.</param>
/// <param name="responseReceived">Receives whether any response was received at all.</param>
/// <param name="options">Optional per-request options passed through unchanged.</param>
public byte[] RequestResource(ServiceResource resource, out HttpStatusCode code, out bool responseReceived, ResourceOptions options = null)
{
    // m_namedResourceMap translates the enum member into its API route.
    var route = m_namedResourceMap[resource];
    return RequestResource(route, out code, out responseReceived, options, resource);
}
// This method gets called by the runtime. Use this method to add services to the container.
// Registers controllers, bound ResourceOptions, IdentityServer bearer
// authentication, health checks, the EF Core DbContext, Swagger with OAuth2,
// and multi-tenant services.
public void ConfigureServices(IServiceCollection services)
{
    services.AddControllers();
    // Bind configuration twice: a local instance for use inside this method,
    // and the options-pattern registration with validation for DI consumers.
    var resourceOptions = new ResourceOptions();
    Configuration.Bind(resourceOptions);
    services.AddOptions <ResourceOptions>()
        .Bind(Configuration.GetSection(ResourceOptions.Resource))
        .ValidateDataAnnotations();
    services.AddOptions();
    services.AddResourceServices();
    services.AddAuthentication("Bearer")
    /*.AddJwtBearer("Bearer", options =>
     * {
     *  options.Audience = "api1";
     *  options.Authority = resourceOptions.IdentityServerUrl;
     * })*/
    // https://www.scottbrady91.com/identity-server/aspnet-core-swagger-ui-authorization-using-identityserver4
    .AddIdentityServerAuthentication("Bearer", options =>
    {
        // required audience of access tokens
        // see https://identityserver4.readthedocs.io/en/latest/topics/resources.html
        // as we use options.EmitStaticAudienceClaim = true; in loginservice startup we use this ApiName
        options.ApiName = resourceOptions.IdentityServerUrl.AppendPathSegment("resources"); //"api1";
        // auth server base endpoint (this will be used to search for disco doc)
        options.Authority = resourceOptions.IdentityServerUrl;
    });
    // Hosts resolved up-front; the DNS health check compares live lookups
    // against these addresses captured at startup.
    var targetHost = "www.microsoft.com";
    var targetHostIpAddresses = Dns.GetHostAddresses(targetHost).Select(h => h.ToString()).ToArray();
    var targetHost2 = "localhost";
    var targetHost2IpAddresses = Dns.GetHostAddresses(targetHost2).Select(h => h.ToString()).ToArray();
    var maximumMemory = 104857600; // 100 MB threshold shared by the memory checks
    services.AddHealthChecks()
        .AddDbContextCheck <ResourceDataContext>()
        .AddIdentityServer(new Uri(resourceOptions.IdentityServerUrl), "test IdSrv", tags: new string[] { "IdSrv" })
        .AddDnsResolveHealthCheck(setup =>
        {
            setup.ResolveHost(targetHost).To(targetHostIpAddresses)
                .ResolveHost(targetHost2).To(targetHost2IpAddresses);
        }, tags: new string[] { "dns" }, name: "DNS Check")
        .AddPingHealthCheck(setup => { setup.AddHost("127.0.0.1", 5000); }, tags: new string[] { "ping" }, name: "Ping Check")
        .AddTcpHealthCheck(setup => { setup.AddHost("127.0.0.1", 1121); }, tags: new string[] { "tcp" }, name: "Logging TCP port Check")
        .AddPrivateMemoryHealthCheck(maximumMemory, tags: new string[] { "privatememory" }, name: "PrivateMemory Check")
        .AddWorkingSetHealthCheck(maximumMemory, tags: new string[] { "workingset" }, name: "WorkingSet Check")
        .AddVirtualMemorySizeHealthCheck(maximumMemory, tags: new string[] { "virtualmemory" }, name: "VirtualMemory Check");
    // Add a health check for a SQL Server database
    services.AddDbContext <ResourceDataContext>(options => options.UseSqlServer(Configuration.GetConnectionString("ResourceDb")));
    services.AddSwaggerGen(c =>
    {
        c.SwaggerDoc("v1", new OpenApiInfo { Title = "Swagger and HealthCheck blog Login Service", Version = "v1" });
        c.OperationFilter <AuthorizeOperationFilter>();
        // OAuth2 authorization-code flow for the Swagger UI
        c.AddSecurityDefinition("oauth2", new OpenApiSecurityScheme
        {
            Type = SecuritySchemeType.OAuth2,
            Flows = new OpenApiOAuthFlows
            {
                AuthorizationCode = new OpenApiOAuthFlow
                {
                    AuthorizationUrl = new Uri("https://localhost:1115/connect/authorize"),
                    TokenUrl = new Uri("https://localhost:1115/connect/token"),
                    Scopes = new Dictionary <string, string> { { "api1", "Demo API - full access" } }
                }
            }
        });
    });
    services.AddSingleton <IAuthorizationMiddlewareResultHandler, MyAuthorizationMiddlewareResultHandler>();
    // Multi Tenant Services
    services.AddMultiTenant <TenantInfo>()
        .WithRouteStrategy()
        .WithConfigurationStore();
}
/// <summary>
/// Write a PD archive: signature and a "Plus"/"Only" scramble marker,
/// a directory of 0x90-byte records (0x80-byte cp932 name + 16-byte
/// offset/size slot), then file bodies (optionally scrambled) starting at a
/// fixed data area.  Offsets and sizes are back-patched afterwards.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    int file_count = list.Count();
    // directory format caps the archive at 0x4000 entries
    if (file_count > 0x4000)
    {
        throw new InvalidFormatException(arcStrings.MsgTooManyFiles);
    }
    if (null != callback)
    {
        // announce total step count (files + index write + index update)
        callback(file_count + 2, null, null);
    }
    int callback_count = 0;
    var pd_options = GetOptions <PdOptions> (options);
    // leaveOpen: true -- the caller owns the output stream
    using (var writer = new BinaryWriter(output, Encoding.ASCII, true))
    {
        writer.Write(Signature);
        if (pd_options.ScrambleContents)
        {
            writer.Write((uint)0x73756c50); // "Plus" -- contents scrambled
        }
        else
        {
            writer.Write((uint)0x796c6e4f); // "Only" -- contents stored as-is
        }
        output.Seek(0x38, SeekOrigin.Current); // reserved header area
        writer.Write(file_count);
        writer.Write((int)0);
        long dir_offset = output.Position;
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgWritingIndex);
        }
        var encoding = Encodings.cp932.WithFatalFallback();
        byte[] name_buf = new byte[0x80]; // fixed-width name field
        int previous_size = 0;
        // first, write names only
        foreach (var entry in list)
        {
            string name = Path.GetFileName(entry.Name);
            try
            {
                int size = encoding.GetBytes(name, 0, name.Length, name_buf, 0);
                // clear residue left in the shared buffer by a previous,
                // longer name
                for (int i = size; i < previous_size; ++i)
                {
                    name_buf[i] = 0;
                }
                previous_size = size;
            }
            catch (EncoderFallbackException X)
            {
                // name contains characters unrepresentable in cp932
                throw new InvalidFileName(entry.Name, arcStrings.MsgIllegalCharacters, X);
            }
            catch (ArgumentException X)
            {
                // GetBytes throws when the encoded name overflows name_buf
                throw new InvalidFileName(entry.Name, arcStrings.MsgFileNameTooLong, X);
            }
            writer.Write(name_buf);
            // leave the 16-byte offset/size slot empty; filled in below
            writer.BaseStream.Seek(16, SeekOrigin.Current);
        }
        // now, write files and remember offset/sizes
        // data area starts at a fixed distance past the directory
        long current_offset = 0x240000 + dir_offset;
        output.Seek(current_offset, SeekOrigin.Begin);
        foreach (var entry in list)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            entry.Offset = current_offset;
            using (var input = File.OpenRead(entry.Name))
            {
                var size = input.Length;
                if (size > uint.MaxValue)
                {
                    throw new FileSizeException();
                }
                current_offset += size;
                entry.Size = (uint)size;
                if (pd_options.ScrambleContents)
                {
                    CopyScrambled(input, output);
                }
                else
                {
                    input.CopyTo(output);
                }
            }
        }
        if (null != callback)
        {
            callback(callback_count++, null, arcStrings.MsgUpdatingIndex);
        }
        // at last, go back to directory and write offset/sizes
        dir_offset += 0x80; // skip the first record's name field
        foreach (var entry in list)
        {
            writer.BaseStream.Position = dir_offset;
            writer.Write(entry.Offset);
            writer.Write((long)entry.Size);
            dir_offset += 0x90; // next record's offset/size slot
        }
    }
}
/// <summary>
/// Returns a factory that provisions a per-region VM scale set application:
/// public IP + load balancer + backend pool/probe/rule, virtual network and
/// subnet, a scale set bootstrapped via templated cloud-init custom data,
/// and CPU-based autoscaling.  The factory yields the regional endpoint
/// backed by the public IP.
/// </summary>
public Func <RegionalContext, IRegionalEndpoint> BuildVMScaleSetApp(GlobalContext context)
{
    // All resources share these options; DeleteBeforeReplace avoids name
    // collisions on replacement.
    var options = (CustomResourceOptions)ResourceOptions.Merge(context.Options, new CustomResourceOptions { DeleteBeforeReplace = true });
    // cloud-init template with ${...} placeholders substituted below
    var file = File.ReadAllText("./vm/vmCustomData.yaml");
    return((RegionalContext region) =>
    {
        var location = region.Location;
        var domainName = $"rnddnplm{location}"; //TODO: random
        var publicIp = new PublicIp($"pip-{location}", new PublicIpArgs
        {
            ResourceGroupName = resourceGroup.Name,
            Location = location,
            AllocationMethod = "Static",
            DomainNameLabel = domainName,
        }, options);
        var loadBalancer = new LoadBalancer($"lb-{location}", new LoadBalancerArgs
        {
            ResourceGroupName = resourceGroup.Name,
            Location = location,
            FrontendIpConfigurations =
            {
                new LoadBalancerFrontendIpConfigurationsArgs
                {
                    Name = "PublicIPAddress",
                    PublicIpAddressId = publicIp.Id,
                }
            }
        }, options);
        var bpepool = new BackendAddressPool($"bap-{location}", new BackendAddressPoolArgs
        {
            ResourceGroupName = resourceGroup.Name,
            LoadbalancerId = loadBalancer.Id,
        }, options);
        // probe name is length-limited, hence Truncate(16)
        var probe = new Probe($"ssh-probe-{location}".Truncate(16), new ProbeArgs
        {
            ResourceGroupName = resourceGroup.Name,
            LoadbalancerId = loadBalancer.Id,
            Port = 80,
        }, options);
        var rule = new Rule($"rule-{location}", new RuleArgs
        {
            ResourceGroupName = resourceGroup.Name,
            BackendAddressPoolId = bpepool.Id,
            BackendPort = 80,
            FrontendIpConfigurationName = "PublicIPAddress",
            FrontendPort = 80,
            LoadbalancerId = loadBalancer.Id,
            ProbeId = probe.Id,
            Protocol = "Tcp",
        }, options);
        var vnet = new VirtualNetwork($"vnet-{location}", new VirtualNetworkArgs
        {
            ResourceGroupName = resourceGroup.Name,
            Location = location,
            AddressSpaces = { "10.0.0.0/16" },
        }, options);
        var subnet = new Subnet($"subnet-{location}", new SubnetArgs
        {
            ResourceGroupName = resourceGroup.Name,
            AddressPrefix = "10.0.2.0/24",
            VirtualNetworkName = vnet.Name,
        }, options);
        // Resolve Cosmos outputs, then substitute them into the cloud-init
        // template handed to the scale set as custom data.
        var customData = Output.All <string>(context.CosmosAccount.Endpoint, context.CosmosAccount.PrimaryMasterKey, context.Database.Name, context.Container.Name)
            .Apply(values =>
            {
                return file.Replace("${ENDPOINT}", values[0])
                    .Replace("${MASTER_KEY}", values[1])
                    .Replace("${DATABASE}", values[2])
                    .Replace("${COLLECTION}", values[3])
                    .Replace("${LOCATION}", location);
            });
        var scaleSet = new ScaleSet($"vmss-{location}", new ScaleSetArgs
        {
            ResourceGroupName = resourceGroup.Name,
            Location = location,
            NetworkProfiles =
            {
                new ScaleSetNetworkProfilesArgs
                {
                    IpConfigurations =
                    {
                        new ScaleSetNetworkProfilesIpConfigurationsArgs
                        {
                            LoadBalancerBackendAddressPoolIds = { bpepool.Id },
                            Name = "IPConfiguration",
                            Primary = true,
                            SubnetId = subnet.Id,
                        }
                    },
                    Name = "networkprofile",
                    Primary = true,
                }
            },
            OsProfile = new ScaleSetOsProfileArgs
            {
                // credentials redacted in source
                AdminUsername = "******",
                AdminPassword = "******",
                ComputerNamePrefix = "lab",
                CustomData = customData,
            },
            OsProfileLinuxConfig = new ScaleSetOsProfileLinuxConfigArgs { DisablePasswordAuthentication = false },
            Sku = new ScaleSetSkuArgs
            {
                Capacity = 1,
                Name = "Standard_DS1_v2",
                Tier = "Standard",
            },
            StorageProfileDataDisks =
            {
                new ScaleSetStorageProfileDataDisksArgs
                {
                    Caching = "ReadWrite",
                    CreateOption = "Empty",
                    DiskSizeGb = 10,
                    Lun = 0,
                }
            },
            StorageProfileImageReference = new ScaleSetStorageProfileImageReferenceArgs
            {
                Offer = "UbuntuServer",
                Publisher = "Canonical",
                Sku = "18.04-LTS",
                Version = "latest",
            },
            StorageProfileOsDisk = new ScaleSetStorageProfileOsDiskArgs
            {
                Caching = "ReadWrite",
                CreateOption = "FromImage",
                ManagedDiskType = "Standard_LRS",
                Name = "",
            },
            UpgradePolicyMode = "Automatic",
        },
        // the scale set must wait for the pool and balancing rule
        (CustomResourceOptions)ResourceOptions.Merge(options, new ResourceOptions { DependsOn = { bpepool, rule } }));
        // CPU-based autoscaling: scale out above 75%, in below 25%
        var autoscale = new AutoscaleSetting($"as-{location}", new AutoscaleSettingArgs
        {
            ResourceGroupName = resourceGroup.Name,
            Location = location,
            Notification = new AutoscaleSettingNotificationArgs
            {
                Email = new AutoscaleSettingNotificationEmailArgs
                {
                    CustomEmails = { "*****@*****.**" },
                    SendToSubscriptionAdministrator = true,
                    SendToSubscriptionCoAdministrator = true,
                },
            },
            Profiles =
            {
                new AutoscaleSettingProfilesArgs
                {
                    Capacity = new AutoscaleSettingProfilesCapacityArgs
                    {
                        Default = 1,
                        Maximum = 10,
                        Minimum = 1,
                    },
                    Name = "defaultProfile",
                    Rules =
                    {
                        new AutoscaleSettingProfilesRulesArgs
                        {
                            MetricTrigger = new AutoscaleSettingProfilesRulesMetricTriggerArgs
                            {
                                MetricName = "Percentage CPU",
                                MetricResourceId = scaleSet.Id,
                                Operator = "GreaterThan",
                                Statistic = "Average",
                                Threshold = 75,
                                TimeAggregation = "Average",
                                TimeGrain = "PT1M",
                                TimeWindow = "PT5M",
                            },
                            ScaleAction = new AutoscaleSettingProfilesRulesScaleActionArgs
                            {
                                Cooldown = "PT1M",
                                Direction = "Increase",
                                Type = "ChangeCount",
                                Value = 1,
                            },
                        },
                        new AutoscaleSettingProfilesRulesArgs
                        {
                            MetricTrigger = new AutoscaleSettingProfilesRulesMetricTriggerArgs
                            {
                                MetricName = "Percentage CPU",
                                MetricResourceId = scaleSet.Id,
                                Operator = "LessThan",
                                Statistic = "Average",
                                Threshold = 25,
                                TimeAggregation = "Average",
                                TimeGrain = "PT1M",
                                TimeWindow = "PT5M",
                            },
                            ScaleAction = new AutoscaleSettingProfilesRulesScaleActionArgs
                            {
                                Cooldown = "PT1M",
                                Direction = "Decrease",
                                Type = "ChangeCount",
                                Value = 1,
                            },
                        },
                    }
                }
            },
            TargetResourceId = scaleSet.Id,
        }, options);
        return new AzureEndpoint(publicIp.Id);
    });
}
/// <summary>
/// Write an NPK2 archive: "NPK2" magic, version, AES IV, entry count and
/// index length, followed by an AES-CBC-encrypted index and the (optionally
/// segmented/compressed) file bodies.  Bodies are written first; the header
/// and encrypted index are written afterwards at the stream start.
/// </summary>
public override void Create(Stream output, IEnumerable <Entry> list, ResourceOptions options, EntryCallback callback)
{
    var npk_options = GetOptions <Npk2Options> (options);
    // the index is AES-encrypted; a key is mandatory
    if (null == npk_options.Key)
    {
        throw new InvalidEncryptionScheme();
    }
    var enc = DefaultEncoding.WithFatalFallback();
    int index_length = 0;
    var dir = new List <NpkStoredEntry>();
    foreach (var entry in list)
    {
        var ext = Path.GetExtension(entry.Name).ToLowerInvariant();
        var npk_entry = new NpkStoredEntry
        {
            Name = entry.Name,
            // archive stores forward-slash paths
            RawName = enc.GetBytes(entry.Name.Replace('\\', '/')),
            // "solid" extensions are stored as a single segment
            IsSolid = SolidFiles.Contains(ext),
            IsPacked = !DisableCompression.Contains(ext),
        };
        int segment_count = 1;
        if (!npk_entry.IsSolid)
        {
            // non-solid files are split into DefaultSegmentSize chunks
            segment_count = (int)(((long)entry.Size + DefaultSegmentSize - 1) / DefaultSegmentSize);
        }
        // per-entry index record: 3 fixed bytes + name + 0x28-byte body
        // + 0x14 bytes per segment
        index_length += 3 + npk_entry.RawName.Length + 0x28 + segment_count * 0x14;
        dir.Add(npk_entry);
    }
    // pad index length to a 16-byte boundary (AES block size)
    index_length = (index_length + 0xF) & ~0xF;
    int callback_count = 0;
    using (var aes = Aes.Create())
    {
        aes.Mode = CipherMode.CBC;
        aes.Padding = PaddingMode.PKCS7;
        aes.Key = npk_options.Key;
        aes.IV = GenerateAesIV();
        // data area starts after the 0x20-byte header and the index
        output.Position = 0x20 + index_length;
        foreach (var entry in dir)
        {
            if (null != callback)
            {
                callback(callback_count++, entry, arcStrings.MsgAddingFile);
            }
            using (var writer = new NpkWriter(entry, output, aes))
                writer.Write(DefaultSegmentSize);
        }
        // back to the start: magic + version, IV, count and index length
        output.Position = 0;
        var buffer = new byte[] { (byte)'N', (byte)'P', (byte)'K', (byte)'2', 1, 0, 0, 0 };
        output.Write(buffer, 0, 8);
        output.Write(aes.IV, 0, 0x10);
        LittleEndian.Pack(dir.Count, buffer, 0);
        LittleEndian.Pack(index_length, buffer, 4);
        output.Write(buffer, 0, 8);
        // encrypt the index through a proxy stream so disposing the
        // CryptoStream doesn't close the archive stream
        using (var encryptor = aes.CreateEncryptor())
        using (var proxy = new ProxyStream(output, true))
        using (var index_stream = new CryptoStream(proxy, encryptor, CryptoStreamMode.Write))
        using (var index = new BinaryWriter(index_stream))
        {
            if (null != callback)
            {
                callback(callback_count++, null, arcStrings.MsgWritingIndex);
            }
            foreach (var entry in dir)
            {
                index.Write(entry.IsSolid); // 0 -> segmentation enabled, 1 -> no segmentation
                index.Write((short)entry.RawName.Length);
                index.Write(entry.RawName);
                index.Write(entry.UnpackedSize);
                index.Write(entry.CheckSum);
                index.Write(entry.Segments.Count);
                foreach (var segment in entry.Segments)
                {
                    index.Write(segment.Offset);
                    index.Write(segment.AlignedSize);
                    index.Write(segment.Size);
                    index.Write(segment.UnpackedSize);
                }
            }
        }
    }
}
/// <summary>
/// Write an archive with a big-endian index: entry count and data base
/// offset, per-entry records (NUL-terminated cp932 name + offset + size),
/// then uncompressed file bodies.  The index is written last, after
/// offsets/sizes are known.
/// </summary>
public override void Create(Stream output, IEnumerable<Entry> list, ResourceOptions options, EntryCallback callback)
{
    var encoding = Encodings.cp932.WithFatalFallback();
    int callback_count = 0;
    var real_entry_list = new List<Entry>();
    var used_names = new HashSet<string>();
    int index_size = 0;
    foreach (var entry in list)
    {
        if (!used_names.Add (entry.Name)) // duplicate name
            continue;
        try
        {
            // NUL-terminated name contributes its byte length + 1
            index_size += encoding.GetByteCount (entry.Name) + 1;
        }
        catch (EncoderFallbackException X)
        {
            // name contains characters unrepresentable in cp932
            throw new InvalidFileName (entry.Name, arcStrings.MsgIllegalCharacters, X);
        }
        index_size += 8; // 4-byte offset + 4-byte size per record
        real_entry_list.Add (entry);
    }
    long start_offset = output.Position;
    // 6-byte header (count + base offset) precedes the index
    long base_offset = 6+index_size;
    output.Seek (base_offset, SeekOrigin.Current);
    foreach (var entry in real_entry_list)
    {
        using (var input = File.OpenRead (entry.Name))
        {
            var file_size = input.Length;
            if (file_size > uint.MaxValue)
                throw new FileSizeException();
            // offsets are stored relative to the end of the index
            long file_offset = output.Position - base_offset;
            if (file_offset+file_size > uint.MaxValue)
                throw new FileSizeException();
            entry.Offset = file_offset;
            entry.Size = (uint)file_size;
            if (null != callback)
                callback (callback_count++, entry, arcStrings.MsgAddingFile);
            input.CopyTo (output);
        }
    }
    if (null != callback)
        callback (callback_count++, null, arcStrings.MsgWritingIndex);
    // rewind and write header + index; all multi-byte values big-endian
    output.Position = start_offset;
    using (var writer = new BinaryWriter (output, encoding, true))
    {
        writer.Write (Binary.BigEndian ((short)real_entry_list.Count));
        writer.Write (Binary.BigEndian ((uint)base_offset));
        foreach (var entry in real_entry_list)
        {
            writer.Write (encoding.GetBytes (entry.Name));
            writer.Write ((byte)0); // name terminator
            writer.Write (Binary.BigEndian ((uint)entry.Offset));
            writer.Write (Binary.BigEndian ((uint)entry.Size));
        }
    }
}