// Scales a sample set by <paramref name="multiplier"/> via SampleSet.Multiply.
// NOTE(review): `product` is a plain assignment from `multiplicand`; if SampleSet
// is a reference type this operator mutates its left operand in place — confirm
// SampleSet is a value type or that callers rely on in-place scaling.
public static SampleSet operator *(SampleSet multiplicand, double multiplier)
{
    SampleSet product = multiplicand;
    product.Multiply(multiplier);
    return(product);
}
/// <summary>
/// Generates a new <see cref="TimelineObject"/>.
/// </summary>
/// <param name="origin">Hit object this timeline object belongs to; custom index, volume and filename are copied from it for circles and hold-note heads.</param>
/// <param name="time">Time of this timeline object.</param>
/// <param name="objectType">Object type value stored on this timeline object.</param>
/// <param name="repeat">Repeat value stored on this timeline object.</param>
/// <param name="hitsounds">Hitsound bit flags: bit 0 = normal, bit 1 = whistle, bit 2 = finish, bit 3 = clap.</param>
/// <param name="sampleset">Sample set of the normal hitsound.</param>
/// <param name="additionset">Sample set of the additions.</param>
public TimelineObject(HitObject origin, double time, int objectType, int repeat, int hitsounds, SampleSet sampleset, SampleSet additionset)
{
    Origin = origin;
    Time = time;
    // Unpack the hitsound bit flags into the individual booleans.
    BitArray b = new BitArray(new[] { hitsounds });
    Normal = b[0];
    Whistle = b[1];
    Finish = b[2];
    Clap = b[3];
    SampleSet = sampleset;
    AdditionSet = additionset;
    ObjectType = objectType;
    Repeat = repeat;
    if (IsCircle || IsHoldnoteHead) // Can have custom index/volume/filename
    {
        CustomIndex = origin.CustomIndex;
        SampleVolume = origin.SampleVolume;
        Filename = origin.Filename;
    }
}
/// <summary>
/// Entry point: loads the training and testing sample sets, configures and runs
/// the rule generator for a Pittsburgh-style fuzzy classifier, and saves the
/// result to a UFS file.
/// </summary>
/// <param name="args">Command-line arguments consumed by fill_params.</param>
/// <returns>Always 1.</returns>
public override int Run(string[] args)
{
    Console.WriteLine("Start");
    fill_params(args);
    Console.WriteLine("Params get \nfile tra {0} \nfile name tst {1} ", file_learn, file_test);
    // Fix: the learn set was constructed twice in the original; once is enough.
    Class_learn_set = new SampleSet(file_learn);
    Console.WriteLine("Tra create");
    Class_test_set = new SampleSet(file_test);
    Console.WriteLine("Tst create");
    conf = new InitBySamplesConfig();
    conf.Init(Class_learn_set.CountVars);
    fill_conf();
    Console.WriteLine("Conf Filed");
    Class_Pittsburg = new PCFuzzySystem(Class_learn_set, Class_test_set);
    Console.WriteLine("Classifier created");
    generator = new GeneratorRulesBySamples();
    Class_Pittsburg = generator.Generate(Class_Pittsburg, conf);
    Console.WriteLine("Gereration complite");
    PCFSUFSWriter.saveToUFS(Class_Pittsburg, file_out);
    Console.WriteLine("Saved");
    return 1;
}
// Divides a sample set by <paramref name="divisor"/> via SampleSet.Divide.
// NOTE(review): `quotient` is a plain assignment from `dividend`; if SampleSet is
// a reference type this operator mutates its left operand in place — confirm
// SampleSet is a value type or that callers rely on in-place division.
public static SampleSet operator /(SampleSet dividend, double divisor)
{
    SampleSet quotient = dividend;
    quotient.Divide(divisor);
    return(quotient);
}
/// <summary>
/// Adds the filename of the first-seen sample matching the given sample set,
/// hitsound and custom index to <paramref name="samples"/>, avoiding duplicate
/// sounds. When the sample file does not exist, optionally falls back to the
/// default (index 0) filename.
/// </summary>
/// <param name="sampleSet">Sample set of the wanted hitsound.</param>
/// <param name="hitsound">Hitsound of the wanted sample.</param>
/// <param name="index">Custom sample index used to build the filename.</param>
/// <param name="samples">Output list the resolved filename is appended to.</param>
/// <param name="mode">Game mode used to build the filename.</param>
/// <param name="useFilename">When true, nothing is appended to <paramref name="samples"/>.</param>
/// <param name="mapDir">Beatmap directory used to build the full sample path.</param>
/// <param name="firstSamples">Map from extension-less full path to the first occurrence of that sound.</param>
/// <param name="includeDefaults">Whether to fall back to the default filename when the sample is missing.</param>
private void AddFirstIdenticalFilename(SampleSet sampleSet, Hitsound hitsound, int index, List <string> samples, GameMode mode, bool useFilename, string mapDir, Dictionary <string, string> firstSamples, bool includeDefaults)
{
    string filename = GetFileName(sampleSet, hitsound, index, mode);
    string samplePath = Path.Combine(mapDir, filename);
    string fullPathExtLess = Path.Combine(
        Path.GetDirectoryName(samplePath) ?? throw new InvalidOperationException(),
        Path.GetFileNameWithoutExtension(samplePath));

    // Get the first occurence of this sound to not get duplicated.
    // Fix: TryGetValue replaces Keys.Contains + indexer (single dictionary lookup).
    if (firstSamples.TryGetValue(fullPathExtLess, out string firstSample))
    {
        if (!useFilename)
        {
            samples.Add(Path.GetFileName(firstSample));
        }
    }
    else
    {
        // Sample doesn't exist; optionally fall back to the default sample name.
        if (!useFilename && includeDefaults)
        {
            samples.Add(GetFileName(sampleSet, hitsound, 0, mode));
        }
    }
}
/// <summary>
/// Trains one classifier instance per cross-validation slot in
/// [xValidationStart, xValidationStart + xValidationLength), in parallel, and
/// stores them in xClassifierInstances keyed by slot index.
/// </summary>
/// <param name="trainingSamples">Full pool of training samples.</param>
/// <param name="trainingSize">Number of training samples per instance.</param>
/// <param name="xValidationStart">First cross-validation slot index.</param>
/// <param name="xValidationLength">Number of cross-validation slots.</param>
/// <returns>This factory, for fluent chaining.</returns>
public ClassifierFactory <T> Train(SampleSet <T> trainingSamples, int trainingSize, int xValidationStart = 0, int xValidationLength = 1)
{
    // train with a dataset that may require additional cross validation classifier instances
    xClassifierInstances = Enumerable.Range(xValidationStart, xValidationLength).AsParallel().Select(x => new { x, instance = CreateInstance(SampleSetHelpers.GetSampleSetTrainingSamples(trainingSamples, trainingSize, x), standardMin, standardMax) })
                           .ToDictionary(validation => validation.x, validation => validation.instance);

    return(this);
}
/// <summary>
/// Loads sample plugins from the current directory, collects their samples and
/// the distinct set of categories, and appends an "All" category when any
/// samples were loaded.
/// </summary>
/// <returns>Always null; results accumulate in LoadedSamples / SampleCategories.</returns>
protected virtual Sample LoadSamples()
{
    string dir = ".";
    // NOTE(review): `samples` is constructed but never used — confirm the
    // SampleSet constructor has no required side effect, then remove it.
    var samples = new SampleSet();
    PluginManager.Instance.LoadDirectory(dir);
    foreach (IPlugin plugin in PluginManager.Instance.InstalledPlugins)
    {
        if (plugin is SamplePlugin pluginInstance)
        {
            this.LoadedSamplePlugins.Add(pluginInstance.Name);
            foreach (SdkSample sample in pluginInstance.Samples)
            {
                this.LoadedSamples.Add(sample);
            }
        }
    }
    // Build the distinct category list from the loaded samples.
    foreach (SdkSample sample in this.LoadedSamples)
    {
        if (!this.SampleCategories.Contains(sample.Metadata["Category"]))
        {
            this.SampleCategories.Add(sample.Metadata["Category"]);
        }
    }
    if (this.LoadedSamples.Count > 0)
    {
        this.SampleCategories.Add("All");
    }
    return(null);
}
// Coroutine that waits (yielding once per iteration) until every noise sample
// result in the sample set is complete, then builds each block column across the
// chunk column's X/Z footprint, loads any serialized chunk data, and marks the
// chunks as built.
IEnumerator AwaitSamplesAsync(SampleSet sampleSet, Chunk[] column)
{
    for (;;)
    {
        if (sampleSet.results.Values.All(x => x.complete))
        {
            // loop through the x and z axis. The GenerateColumn coroutine will build a column of blocks at this position.
            for (int x = column[0].pos.x; x < column[0].pos.x + Chunk.Size; x++)
            {
                for (int z = column[0].pos.z; z < column[0].pos.z + Chunk.Size; z++)
                {
                    GenerateColumn(x, z, sampleSet, column);
                }
            }
            for (int i = 0; i < column.Length; i++)
            {
                Chunk chunk = column[i];
                chunk.SetBlocksUnmodified();
                // Apply previously serialized block modifications for this chunk.
                Serialization.Load(chunk);
                chunk.built = true;
            }
            break;
        }
        else
        {
            yield return(null);
        }
    }
    yield return(null);
}
/// <summary>
/// Builds the multi-sample command line: one --bam per sample (plus its sample
/// type option when not Other), followed by reference, genome folder, filter
/// bed, and output directory arguments.
/// </summary>
/// <returns>The assembled command-line fragment.</returns>
public StringBuilder GetMultiSampleCommandLine(SampleSet <CanvasPedigreeSample> samples, GenomeMetadata genomeMetadata, Vcf vcf, IDirectoryLocation sampleSandbox)
{
    // NOTE(review): the `vcf` parameter is unused here — confirm it is required
    // by an interface or override contract before removing.
    StringBuilder commandLine = new StringBuilder();
    foreach (var sampleKvp in samples)
    {
        var sampleId = sampleKvp.Key.Id;
        var sample = sampleKvp.Value;
        commandLine.Append($" --bam \"{sample.Bam.BamFile}\"");
        if (sample.SampleType != SampleType.Other)
        {
            commandLine.Append($" --{sample.SampleType.GetOptionName()} {sampleId}");
        }
    }
    IFileLocation kmerFasta = _annotationFileProvider.GetKmerFasta(genomeMetadata);
    commandLine.Append($" --reference \"{kmerFasta}\"");
    // Genome folder is the directory of the first sequence's FASTA file.
    IDirectoryLocation wholeGenomeFasta = new FileLocation(genomeMetadata.Sequences.First().FastaPath).Directory;
    commandLine.Append($" --genome-folder \"{wholeGenomeFasta}\"");
    IFileLocation filterBed = _annotationFileProvider.GetFilterBed(genomeMetadata);
    commandLine.Append($" --filter-bed \"{filterBed}\"");
    commandLine.Append($" --output \"{sampleSandbox}\"");
    return(commandLine);
}
/// <summary>
/// Bundles the CNV VCF with the per-sample intermediate outputs.
/// </summary>
/// <param name="cnvVcf">The combined CNV VCF.</param>
/// <param name="intermediateOutputs">Intermediate output per sample.</param>
public CanvasSmallPedigreeOutput(
    Vcf cnvVcf,
    SampleSet <IntermediateOutput> intermediateOutputs)
{
    IntermediateOutputs = intermediateOutputs;
    CnvVcf = cnvVcf;
}
// Queries today's step-count summary (DtContinuousStepsDelta) from the HMS Core
// data controller and logs either the result or the failure.
// NOTE(review): async void — exceptions are unobservable by callers; acceptable
// only if this method is wired up as a UI event handler.
public async void ReadToday()
{
    // 1. Use the specified data type (DtContinuousStepsDelta) to call the data controller to query
    // the summary data of this data type of the current day.
    var TodaySummationTask = MyDataController.ReadTodaySummationAsync(DataType.DtContinuousStepsDelta);
    // 2. Calling the data controller to query the summary data of the current day is an
    // asynchronous Task.
    // Note: In this example, the inserted data time is fixed at 2020-12-15 09:05:00.
    // When commissioning the API, you need to change the inserted data time to the current date
    // for data to be queried.
    try
    {
        await TodaySummationTask;
        if (TodaySummationTask.IsCompleted && TodaySummationTask.Result != null)
        {
            SampleSet result = TodaySummationTask.Result;
            if (TodaySummationTask.Exception == null)
            {
                Logger("Success read today summation from HMS core");
                ShowSampleSet(result);
                Logger(Split);
            }
            else
            {
                PrintFailureMessage(TodaySummationTask.Exception, "ReadTodaySummation");
            }
        }
    }
    catch (Exception ex)
    {
        PrintFailureMessage(ex, "ReadTodaySummation");
    }
}
// Builds a render test scene: opens Assets/Scenes/TestScene.unity, adds one
// GltfBoundsAsset GameObject per sample-set item (marking it as a streaming
// asset when a streaming path is configured), then saves the scene under the
// sample set's name and refreshes the asset database. Logs a warning instead
// when GLTFAST_RENDER_TEST is not defined.
public static void CreateRenderSingleTestScene(this SampleSet sampleSet)
{
#if GLTFAST_RENDER_TEST
    // Texture2D dummyReference = null;
    var testScene = EditorSceneManager.OpenScene("Assets/Scenes/TestScene.unity");

    foreach (var item in sampleSet.GetItems())
    {
        // var settingsGameObject = new GameObject("GraphicsTestSettings");
        // var graphicsTestSettings = settingsGameObject.AddComponent<UniversalGraphicsTestSettings>();
        var go = new GameObject(item.name);
        var gltfAsset = go.AddComponent <GltfBoundsAsset>();
        if (string.IsNullOrEmpty(sampleSet.streamingAssetsPath))
        {
            gltfAsset.url = Path.Combine(sampleSet.baseLocalPath, item.path);
        }
        else
        {
            gltfAsset.url = Path.Combine(sampleSet.streamingAssetsPath, item.path);
            gltfAsset.streamingAsset = true;
        }
        gltfAsset.loadOnStartup = true;
        gltfAsset.createBoxCollider = false;
    }
    var scenePath = string.Format("Assets/Scenes/{0}.unity", sampleSet.name);
    EditorSceneManager.SaveScene(testScene, scenePath);
    AssetDatabase.Refresh();
#else
    Debug.LogWarning("Please install the Graphics Test Framework for render tests to work.");
#endif
}
// Loads learn data from a UFS XML file by delegating to the XmlDocument overload.
// NOTE(review): the `tableSet` receiver is never used — the overload builds the
// result from the document alone; confirm this is intended.
public static SampleSet LoadLearnFromUFS(this SampleSet tableSet, string fileName)
{
    XmlDocument Source = new XmlDocument();
    Source.Load(fileName);
    return(LoadLearnFromUFS(Source));
}
/// <summary>
/// Creates a sample from a hitsound layer, copying its sample args (via Copy()),
/// priority, sample set, and hitsound.
/// </summary>
/// <param name="hl">Layer to take the values from.</param>
public Sample(HitsoundLayer hl)
{
    _sampleArgs = hl.SampleArgs.Copy();
    _priority = hl.Priority;
    _sampleSet = hl.SampleSet;
    _hitsound = hl.Hitsound;
}
/// <summary>
/// Creates a sample with the given sample set, hitsound, generating args, and
/// priority. The generating args are stored by reference, not copied.
/// </summary>
public Sample(SampleSet sampleSet, Hitsound hitsound, SampleGeneratingArgs sampleArgs, int priority)
{
    _sampleArgs = sampleArgs;
    _priority = priority;
    _sampleSet = sampleSet;
    _hitsound = hitsound;
}
/// <summary>
/// Creates a default sample: fresh empty generating args, priority 0, and
/// Normal sample set with a Normal hitsound.
/// </summary>
public Sample()
{
    _hitsound = Hitsound.Normal;
    _sampleSet = SampleSet.Normal;
    _priority = 0;
    _sampleArgs = new SampleGeneratingArgs();
}
// Generates terrain for a chunk column: builds a sample region spanning the
// column's full Y extent (one chunk footprint in X/Z), samples noise into it,
// populates spawns, then awaits the results on either a background coroutine
// (multithreaded) or a regular coroutine.
public Region Generate(Chunk[] column)
{
    SampleSet sampleSet = GetSampleSet();
    // Region spans min..max chunk Y of the column; X/Z cover exactly one chunk.
    Region sampleRegion = new Region(
        new World3(column[0].pos.x, column.Min(chunk => chunk.pos.y), column[0].pos.z),
        new World3(
            column[0].pos.x + Chunk.Size - 1,
            column.Max(chunk => chunk.pos.y) + Chunk.Size - 1,
            column[0].pos.z + Chunk.Size - 1)
        );
    sampleSet.SetRegion(sampleRegion);
    generator.SampleNoise(sampleSet);
    PopulateSpawns(sampleSet, column[0].pos);
    if (Config.Multithreaded)
    {
        this.StartCoroutineAsync(AwaitSamplesAsync(sampleSet, column));
    }
    else
    {
        StartCoroutine(AwaitSamples(sampleSet, column));
    }
    return(sampleSet.region);
}
/// <summary>
/// Picks the addition sample set from the highest-priority (lowest value)
/// sample that has a non-normal hitsound; when none is found, falls back to the
/// highest-priority normal-hitsound sample. Returns Auto when no candidate
/// exists at all.
/// </summary>
/// <returns>The resolved addition sample set.</returns>
public SampleSet GetAdditions()
{
    var additions = SampleSet.Auto;
    var bestPriority = int.MaxValue;

    foreach (var sample in Samples)
    {
        if (sample.Hitsound == 0 || sample.Priority > bestPriority) continue;
        additions = sample.SampleSet;
        bestPriority = sample.Priority;
    }

    if (additions != SampleSet.Auto) return additions;

    // Only auto was found among the additions; fall back to the normals.
    bestPriority = int.MaxValue;
    foreach (var sample in Samples)
    {
        if (sample.Hitsound != 0 || sample.Priority > bestPriority) continue;
        additions = sample.SampleSet;
        bestPriority = sample.Priority;
    }

    return additions;
}
// Writes the <Table> element describing a sample set: its name, Training or
// Testing type (based on whether it is the classifier's learn set), the output
// attribute name, all attribute descriptions, and the data rows.
public static void writeAboutTable(XmlWriter writer, SampleSet samplesSet, IFuzzySystem Classifier)
{
    writer.WriteStartElement("Table");
    writer.WriteAttributeString("Name", samplesSet.FileName);
    if (samplesSet == Classifier.LearnSamplesSet)
    {
        writer.WriteAttributeString("Type", "Training");
    }
    else
    {
        writer.WriteAttributeString("Type", "Testing");
    }
    writer.WriteAttributeString("Output", samplesSet.OutputAttribute.Name);
    writer.WriteStartElement("Attributes");
    // Count includes the output attribute, hence CountVars + 1.
    writer.WriteAttributeString("Count", XmlConvert.ToString(samplesSet.CountVars + 1));
    for (int i = 0; i < samplesSet.CountVars; i++)
    {
        writeAboutAttribute(writer, samplesSet.InputAttributes[i]);
    }
    writeAboutAttribute(writer, samplesSet.OutputAttribute);
    writer.WriteEndElement();
    writeAboutRows(writer, samplesSet);
    writer.WriteEndElement();
}
/// <summary>
/// Computes the normalized distance between two SampleSet.RowSample objects.
/// Only the output attribute is taken into account.
/// </summary>
/// <param name="A">First input vector of type SampleSet.RowSample.</param>
/// <param name="B">Second input vector of type SampleSet.RowSample.</param>
/// <param name="dataInfoA">SampleSet used to correctly normalize the values of vector A.</param>
/// <param name="dataInfoB">SampleSet used to correctly normalize the values of vector B; defaults to dataInfoA.</param>
/// <param name="type">Distance type to compute (Euclidean, Manhattan, etc.); any other value falls back to unnormalized Euclidean.</param>
/// <returns>Distance as a double.</returns>
public static double getOutputNormalizedDistance(this SampleSet.RowSample A, SampleSet.RowSample B, SampleSet dataInfoA, SampleSet dataInfoB = null, DistanceType type = DistanceType.Euclidean)
{
    if (dataInfoB == null)
    {
        dataInfoB = dataInfoA;
    }
    double result = 0;
    switch (type)
    {
        case DistanceType.Euclidean:
        {
            result += Math.Pow(dataInfoA.OutputAttribute.EvaluteNormalisedValue(A.DoubleOutput) - dataInfoB.OutputAttribute.EvaluteNormalisedValue(B.DoubleOutput), 2.0);
            return(Math.Sqrt(result));
        }

        case DistanceType.Manhattan:
        {
            result += Math.Abs(dataInfoA.OutputAttribute.EvaluteNormalisedValue(A.DoubleOutput) - dataInfoB.OutputAttribute.EvaluteNormalisedValue(B.DoubleOutput));
            return(result);
        }

        // case DistanceType.Mahalanobis: throw new NotImplementedException("Not implemented; formulas needed");
        // case DistanceType.Gaussian: throw new NotImplementedException("Not implemented; formulas needed");
        default:
        {
            // Fallback: unnormalized Euclidean distance on the raw outputs.
            result += Math.Pow(A.DoubleOutput - B.DoubleOutput, 2.0);
            return(Math.Sqrt(result));
        }
    }
}
/// <summary>
/// Make sure a certain hitsound with a certain sound is in the <see cref="SampleSchema"/>.
/// If it already exists, then it simply returns the index and sampleset of that filename.
/// </summary>
/// <param name="samples">List of <see cref="SampleGeneratingArgs"/> that represents the sound that has to be made.</param>
/// <param name="hitsoundName">Name of the hitsound. For example "hitwhistle" or "slidertick".</param>
/// <param name="sampleSet">Sample set for the hitsound for if it adds a new sample to the sample schema.</param>
/// <param name="newIndex">The index of the added (or already existing) sample.</param>
/// <param name="newSampleSet">The sample set of the added (or already existing) sample.</param>
/// <param name="startIndex">Index to start searching from. It will start at this value and go up until a slot is available.</param>
/// <returns>True if it added a new entry.</returns>
public bool AddHitsound(List <SampleGeneratingArgs> samples, string hitsoundName, SampleSet sampleSet, out int newIndex, out SampleSet newSampleSet, int startIndex = 1)
{
    // Check if our sample schema already has a sample for this
    var filename = FindFilename(samples, "^(normal|soft|drum)-" + hitsoundName);
    if (filename != null)
    {
        newIndex = HitsoundImporter.GetIndexFromFilename(filename);
        newSampleSet = HitsoundImporter.GetSamplesetFromFilename(filename);
        return(false);
    }

    // Make a new sample with the same sound as all the samples mixed and add it to the sample schema
    int index = startIndex;
    newSampleSet = sampleSet;

    // Find an index which is not taken in the sample schema
    while (Keys.Any(o => Regex.IsMatch(o, "^(normal|soft|drum)-" + hitsoundName) && HitsoundImporter.GetIndexFromFilename(o) == index && HitsoundImporter.GetSamplesetFromFilename(o) == sampleSet))
    {
        index++;
    }

    newIndex = index;
    // Index 1 is omitted from the filename.
    filename = $"{sampleSet.ToString().ToLower()}-{hitsoundName}{(index == 1 ? string.Empty : index.ToInvariant())}";
    Add(filename, samples);
    return(true);
}
/// <summary>
/// Grabs the <see cref="SampleSet"/> relying on priority with both itself and other layers of the same sample.
/// Prefers the highest-priority (lowest value) normal-hitsound sample; falls
/// back to addition-hitsound samples when no normal one provided a set.
/// </summary>
/// <returns>The resolved sample set, or None when nothing provides one.</returns>
public SampleSet GetSampleSet()
{
    SampleSet sampleSet = SampleSet.None;
    int bestPriority = int.MaxValue;
    foreach (var sample in Samples)
    {
        if (sample.Hitsound == 0 && sample.Priority <= bestPriority)
        {
            sampleSet = sample.SampleSet;
            bestPriority = sample.Priority;
        }
    }

    // If only auto was found, try to get a sampleset from the additions
    if (sampleSet == SampleSet.None)
    {
        bestPriority = int.MaxValue;
        foreach (var sample in Samples)
        {
            if (sample.Hitsound != 0 && sample.Priority <= bestPriority)
            {
                sampleSet = sample.SampleSet;
                bestPriority = sample.Priority;
            }
        }
    }

    return(sampleSet);
}
// Builds a TSA fuzzy system restricted to the learn-set rows whose inputs fall
// inside the current limits (InBetweenTheLimits) and seeds it with a copy of the
// given knowledge base.
public void GetDots(TSAFuzzySystem approx, KnowlegeBaseTSARules knowlegeBase)
{
    // Dots (COMPLETE, BUT DOUBLECHECK WHEN DEBUGGING)
    var inputs = approx.LearnSamplesSet.DataRows.AsParallel().AsOrdered()
                 .Select(dataRow => dataRow.InputAttributeValue).ToList();
    var localDots = inputs.AsParallel().AsOrdered()
                    .Where(InBetweenTheLimits).ToList();
    //   var strs = new List<string[]>(localDots.Count);
    //   for (int i = 0; i < strs.Capacity; i++)
    //       strs.Add(new[] { string.Empty });
    var rezs = approx.LearnSamplesSet.DataRows.AsParallel().AsOrdered()
               .Where(row => localDots.Contains(row.InputAttributeValue))
               .Select(dataRow => dataRow.DoubleOutput).ToList();

    List <SampleSet.RowSample> rows = localDots.Select((t, i) => new SampleSet.RowSample(t, null, rezs[i], null)).ToList();
    var samples = new SampleSet("1.dat", rows, approx.LearnSamplesSet.InputAttributes, approx.LearnSamplesSet.OutputAttribute);
    // Same set is used for both learn and test.
    system = new TSAFuzzySystem(samples, samples);
    // NOTE(review): `usedRules` is never read afterwards — confirm GetRules has
    // required side effects before removing this call.
    var usedRules = GetRules(knowlegeBase);
    system.RulesDatabaseSet.Add(new KnowlegeBaseTSARules(knowlegeBase, null));
}
/// <summary>
/// Writes the &lt;Rows&gt; element for a sample set: one &lt;Row&gt; per data
/// row, emitting each input attribute (by label string when the attribute has
/// label values, otherwise by numeric value) followed by the output attribute
/// (by string for nominal outputs, otherwise by numeric value).
/// </summary>
public static void writeAboutRows(XmlWriter writer, SampleSet samplesSet)
{
    writer.WriteStartElement("Rows");
    // Fix: use the Count property consistently; the original mixed LINQ Count()
    // here with the Count property in the loop below.
    writer.WriteAttributeString("Count", XmlConvert.ToString(samplesSet.DataRows.Count));
    for (int i = 0; i < samplesSet.DataRows.Count; i++)
    {
        writer.WriteStartElement("Row");
        for (int j = 0; j < samplesSet.CountVars; j++)
        {
            // Nominal (labelled) attributes are written by label, numeric by value.
            if (samplesSet.InputAttributes[j].LabelsValues.Count() > 0)
            {
                writer.WriteElementString(XmlConvert.EncodeName(samplesSet.InputAttributes[j].Name), samplesSet.DataRows[i].InputAttributeString[j]);
            }
            else
            {
                writer.WriteElementString(XmlConvert.EncodeName(samplesSet.InputAttributes[j].Name), XmlConvert.ToString(samplesSet.DataRows[i].InputAttributeValue[j]));
            }
        }
        if (samplesSet.OutputAttribute.Type == SampleSet.AttributeInfo.TypeAttribute.nominate)
        {
            writer.WriteElementString(XmlConvert.EncodeName(samplesSet.OutputAttribute.Name), samplesSet.DataRows[i].StringOutput);
        }
        else
        {
            writer.WriteElementString(XmlConvert.EncodeName(samplesSet.OutputAttribute.Name), XmlConvert.ToString(samplesSet.DataRows[i].DoubleOutput));
        }
        writer.WriteEndElement();
    }
    writer.WriteEndElement();
}
// Loads a SampleSet test-case asset by reading the JSON file at
// <paramref name="sampleSetPath"/> (relative to StreamingAssets) and
// deserializing it onto a fresh ScriptableObject instance.
public UseGltfSampleSetTestCaseAttribute(string sampleSetPath)
{
    var json = File.ReadAllText(Path.Combine(Application.streamingAssetsPath, sampleSetPath));
    m_sampleSet = ScriptableObject.CreateInstance <SampleSet>();
    JsonUtility.FromJsonOverwrite(json, m_sampleSet);
}
// Records a consensus timing sample for the given generation. Samples for
// generations below minGeneration are ignored; a sample for an already-tracked
// generation is appended to that generation's set; when the queue is empty a new
// set is created and the minimum generation is reset.
// NOTE(review): when `generations` is non-empty but contains no entry for this
// generation, the sample is silently discarded, and the computed `max` is never
// read — confirm both are intended.
public void Include(int generation, int consensusSize, TimeSpan delta)
{
    lock (generations)
    {
        if (generation < minGeneration)
        {
            return;
        }
        int max = 0;
        foreach (var b in generations)
        {
            if (b.Generation == generation)
            {
                b.Samples.Add(delta);
                return;
            }
            else
            {
                max = b.Generation;
            }
        }
        if (generations.IsEmpty)
        {
            minGeneration = generation;
            var set = new SampleSet(generation, consensusSize);
            set.Samples.Add(delta);
            generations.Enqueue(set);
        }
    }
}
// Parses a "sampleSet|volume|additionSampleSet" triple into a SampleSetInfo.
// Volume is a 0-100 percentage stored as 0-1 (defaults to 1); a missing or None
// addition sample set falls back to the primary sample set.
internal SampleSetInfo parseSampleSet(string sample)
{
    //most optimal way. need to rewrite if there are ever more samplesets :p.
    //like there are now. >_<
    string[] split = sample.Split('|');
    SampleSet sampleSet = (SampleSet)Convert.ToInt32(split[0]);
    SampleSet normalSampleSet = SampleSet.None;
    float volume = 1;
    if (split.Length > 1)
    {
        volume = int.Parse(split[1]) / 100f;
    }
    if (split.Length > 2 && split[2].Length > 0)
    {
        normalSampleSet = (SampleSet)Convert.ToInt32(split[2]);
    }
    if (normalSampleSet == SampleSet.None)
    {
        normalSampleSet = sampleSet;
    }
    return(new SampleSetInfo { SampleSet = sampleSet, CustomSampleSet = CustomSampleSet.Default, Volume = volume, AdditionSampleSet = normalSampleSet });
}
// Plays this object's tick sound using the control point active at StartTime + 2
// (the +2 ms offset presumably guards against timing-point boundary rounding —
// TODO confirm). Falls back to the control point's sample set when this object's
// set is None, and to the resolved set when the additions are None.
internal override void PlaySound()
{
    ControlPoint p = hitObjectManager.Beatmap.ControlPointAt(StartTime + 2);
    SampleSet ss = SampleSet == SampleSet.None ? p.SampleSet : SampleSet;
    AudioEngine.PlayTickSamples(new HitSoundInfo(SoundType, ss, p.CustomSamples, p.Volume, SampleSetAdditions == Audio.SampleSet.None ? ss : SampleSetAdditions), PositionalSound);
}
/// <summary>
/// Write out the ploidy vcf file if ploidy information is available from the vcf header
/// </summary>
/// <param name="ploidyInfos">Per-sample sex/ploidy information.</param>
/// <param name="genomeMetadata">Genome metadata passed to the ploidy fixer.</param>
/// <param name="sampleSandbox">Directory the ploidy VCF is written into.</param>
/// <returns>The written ploidy VCF.</returns>
public Vcf CreatePloidyVcf(SampleSet <SexPloidyInfo> ploidyInfos, GenomeMetadata genomeMetadata, IDirectoryLocation sampleSandbox)
{
    var ploidyVcf = new Vcf(sampleSandbox.GetFileLocation(PloidyVcfName));
    _ploidyFixer.WritePloidyVcfFile(ploidyVcf, ploidyInfos, genomeMetadata);
    return(ploidyVcf);
}
// Imports a beatmap's storyboard sound samples as hitsound layers. Samples that
// share identical import arguments are merged into a single layer (start times
// appended); sample set and hitsound are inferred from the filename; times are
// optionally sorted and deduplicated per layer.
private static List <HitsoundLayer> ImportStoryboard(string path, bool volumes, bool removeDuplicates, Beatmap beatmap, string mapDir, string prefix = null)
{
    var hitsoundLayers = new List <HitsoundLayer>();
    prefix = prefix ?? string.Empty;

    foreach (var sbSample in beatmap.StoryboardSoundSamples)
    {
        var filepath = sbSample.FilePath;
        string samplePath = Path.Combine(mapDir, filepath);
        var filename = Path.GetFileNameWithoutExtension(filepath);

        // Volume 1 is used when per-sample volumes are not being discriminated.
        var volume = volumes ? sbSample.Volume : 1;

        SampleSet sampleSet = GetSamplesetFromFilename(filename);
        Hitsound hitsound = GetHitsoundFromFilename(filename);

        var importArgs = new LayerImportArgs(ImportType.Storyboard)
        {
            Path = path, SamplePath = samplePath, Volume = volume, DiscriminateVolumes = volumes, RemoveDuplicates = removeDuplicates
        };

        // Find the hitsoundlayer with this path
        HitsoundLayer layer = hitsoundLayers.Find(o => o.ImportArgs == importArgs);

        if (layer != null)
        {
            // Find hitsound layer with this path and add this time
            layer.Times.Add(sbSample.StartTime);
        }
        else
        {
            // Add new hitsound layer with this path
            HitsoundLayer newLayer = new HitsoundLayer(prefix + filename, sampleSet, hitsound, new SampleGeneratingArgs(samplePath)
            {
                Volume = volume
            }, importArgs);
            newLayer.Times.Add(sbSample.StartTime);

            hitsoundLayers.Add(newLayer);
        }
    }

    if (removeDuplicates)
    {
        foreach (var hitsoundLayer in hitsoundLayers)
        {
            hitsoundLayer.Times.Sort();
            hitsoundLayer.RemoveDuplicates();
        }
    }

    return hitsoundLayers;
}
// Token: 0x06003415 RID: 13333 RVA: 0x000264FE File Offset: 0x000246FE
// Decompiled factory: forces the Normal sound flag on when no sound flags are
// set, or when bool_6 is set and the current play mode is not OsuMania, then
// constructs the struct with the remaining values unchanged.
public static Struct69 smethod_0(HitObjectSoundType hitObjectSoundType_1, SampleSet sampleSet_2, Enum38 enum38_1, int int_1, SampleSet sampleSet_3)
{
    if (hitObjectSoundType_1 == HitObjectSoundType.None || (Class885.class547_0.bool_6 && Class466.Current.method_5() != PlayModes.OsuMania))
    {
        hitObjectSoundType_1 |= HitObjectSoundType.Normal;
    }
    return new Struct69(hitObjectSoundType_1, sampleSet_2, enum38_1, int_1, sampleSet_3);
}
// Token: 0x06003414 RID: 13332 RVA: 0x000264D7 File Offset: 0x000246D7
// Decompiled constructor: stores the sound type, both sample sets (note
// sampleSet_3 goes into sampleSet_1), and the remaining fields verbatim.
public Struct69(HitObjectSoundType hitObjectSoundType_1, SampleSet sampleSet_2, Enum38 enum38_1, int int_1, SampleSet sampleSet_3)
{
    this.hitObjectSoundType_0 = hitObjectSoundType_1;
    this.sampleSet_0 = sampleSet_2;
    this.sampleSet_1 = sampleSet_3;
    this.enum38_0 = enum38_1;
    this.int_0 = int_1;
}
// Token: 0x0600212F RID: 8495 RVA: 0x000C0588 File Offset: 0x000BE788
// Decompiled parser for a hitsound trigger description string: optionally
// consumes up to two SampleSet names, then a sound type name, then a numeric
// suffix. A single sample set combined with a sound type is re-interpreted as
// the addition set (slot 1) with slot 0 reset to All. The result is round-
// tripped through ToString and rejected if it does not reproduce the input.
internal Class478(string string_0)
{
    string text = string_0;
    SampleSet sampleSet;
    if (Class558.smethod_0<SampleSet>(text, ref sampleSet) && sampleSet != SampleSet.None)
    {
        this.sampleSet_0 = sampleSet;
        this.bool_0 = true;
        // Strip the consumed enum name from the front of the text.
        text = text.Substring(this.sampleSet_0.ToString().Length, text.Length - this.sampleSet_0.ToString().Length);
        if (Class558.smethod_0<SampleSet>(text, ref sampleSet) && sampleSet != SampleSet.None)
        {
            this.sampleSet_1 = sampleSet;
            this.bool_1 = true;
            text = text.Substring(this.sampleSet_1.ToString().Length, text.Length - this.sampleSet_1.ToString().Length);
        }
    }
    HitObjectSoundType hitObjectSoundType;
    if (Class558.smethod_0<HitObjectSoundType>(text, ref hitObjectSoundType) && hitObjectSoundType != HitObjectSoundType.None && hitObjectSoundType != HitObjectSoundType.Normal)
    {
        this.hitObjectSoundType_0 = hitObjectSoundType;
        text = text.Substring(this.hitObjectSoundType_0.ToString().Length, text.Length - this.hitObjectSoundType_0.ToString().Length);
    }
    int num;
    if (int.TryParse(text, out num))
    {
        this.enum38_0 = (Enum38)num;
        this.bool_2 = true;
    }
    // One sample set plus a sound type means the set belongs in the addition slot.
    if (this.hitObjectSoundType_0 != HitObjectSoundType.None && this.bool_0 && !this.bool_1)
    {
        this.sampleSet_1 = this.sampleSet_0;
        this.sampleSet_0 = SampleSet.All;
        this.bool_0 = false;
        this.bool_1 = true;
    }
    if (!this.ToString().Equals(this.vmethod_0().ToString() + string_0))
    {
        throw new Exception("Invalid hitsound trigger description after " + this.ToString());
    }
}
/// <summary>
/// Minimum index-2 length across every sample index in the BCL sample set.
/// </summary>
public static int MinSampleIndex2Length(SampleSet<SampleBcls> bcls)
{
    return MinSampleIndex2Length(SampleIndexes(bcls));
}
/// <summary>
/// Flattens every lane's index list of every sample into one sequence.
/// </summary>
private static IEnumerable<SampleIndex> SampleIndexes(SampleSet<SampleBcls> bcls)
{
    return from sampleBcls in bcls.SampleData
           from laneIndexList in sampleBcls.LaneIndexes.Values
           from sampleIndex in laneIndexList
           select sampleIndex;
}
/// <summary>
/// Collects every sample index from every per-sample index set into one list.
/// </summary>
public static List<SampleIndex> AllIndexes(SampleSet<HashSet<SampleIndex>> laneIndexes)
{
    var allIndexes = new List<SampleIndex>();
    foreach (var indexSet in laneIndexes.SampleData)
    {
        allIndexes.AddRange(indexSet);
    }
    return allIndexes;
}
// Yields every cross-sample index combination: for each pair of samples, the
// full Cartesian product of their index sets, with each index tagged by its
// owning SampleInfo.
public static IEnumerable<Tuple<KeyValuePair<SampleInfo, SampleIndex>, KeyValuePair<SampleInfo, SampleIndex>>> GetIndexCombinations(SampleSet<HashSet<SampleIndex>> sampleIndexes)
{
    foreach (var indexPair in sampleIndexes.GetPairs())
    {
        foreach (var indexItem1 in indexPair.Item1.Value)
        {
            foreach (var indexItem2 in indexPair.Item2.Value)
            {
                yield return Tuple.Create(new KeyValuePair<SampleInfo, SampleIndex>(indexPair.Item1.Key, indexItem1), new KeyValuePair<SampleInfo, SampleIndex>(indexPair.Item2.Key, indexItem2));
            }
        }
    }
}
// Token: 0x06002239 RID: 8761
// Decompiled abstract factory for a Class325 hit-object variant; parameter
// semantics are not recoverable from this declaration — confirm at call sites.
internal abstract Class325 vmethod_2(int int_0, int int_1, HitObjectSoundType hitObjectSoundType_0, SampleSet sampleSet_0, SampleSet sampleSet_1, Enum38 enum38_0, int int_2, string string_0);
// Token: 0x0600223A RID: 8762
// Decompiled abstract factory for a positioned Class305 hit-object variant;
// parameter semantics are not recoverable from this declaration — confirm at
// call sites.
internal abstract Class305 vmethod_3(Vector2 vector2_0, int int_0, int int_1, bool bool_0, HitObjectSoundType hitObjectSoundType_0, int int_2, SampleSet sampleSet_0, SampleSet sampleSet_1, Enum38 enum38_0, int int_3, string string_0);
// Token: 0x06002BDD RID: 11229 RVA: 0x0011C5E8 File Offset: 0x0011A7E8
// NOTE(review): this decompiled fragment appears truncated and uses the reserved
// word `class` as an identifier — it cannot compile as-is; recover the full
// method body from the decompiler before editing. Kept byte-identical below.
internal override Class305 vmethod_0(Vector2 vector2_0, int int_2, bool bool_0, HitObjectSoundType hitObjectSoundType_0, int int_3, SampleSet sampleSet_0, SampleSet sampleSet_1, Enum38 enum38_0, int int_4, string string_2) { this.method_0(ref vector2_0, int_2); Class316 class = new Class316(this.class297_0, vector2_0, int_2, bool_0, hitObjectSoundType_0, this.method_1());
// Token: 0x06002BE6 RID: 11238 RVA: 0x0011C8CC File Offset: 0x0011AACC
// NOTE(review): this decompiled fragment appears truncated and uses the reserved
// word `class` as an identifier — it cannot compile as-is; recover the full
// method body from the decompiler before editing. Kept byte-identical below.
internal override Class305 vmethod_0(Vector2 vector2_0, int int_0, bool bool_0, HitObjectSoundType hitObjectSoundType_0, int int_1, SampleSet sampleSet_0, SampleSet sampleSet_1, Enum38 enum38_0, int int_2, string string_0) { Class306 class = new Class306(this.class297_0, vector2_0, int_0, bool_0, hitObjectSoundType_0, int_1);
/// <summary>
/// Builds a SampleSet from every .mid file under <paramref name="path"/>,
/// loading each file's melody in parallel and merging the per-song sets.
/// </summary>
/// <param name="path">Directory to scan for MIDI files.</param>
/// <returns>The combined sample set (empty when nothing loads).</returns>
public SampleSet generateSamples(string path)
{
    SampleSet set = new SampleSet();
    var files = Utils.GetFiles(path);
    // Fix: removed the unused `percentage` local from the original.
    Parallel.For(0, files.Length, j =>
    {
        var f = files[j];
        // Fix: skip non-MIDI files before logging; the original printed
        // "Adding ..." even for files it then ignored.
        if (System.IO.Path.GetExtension(f) != ".mid")
            return;
        Console.WriteLine("Adding {0}", System.IO.Path.GetFileNameWithoutExtension(f));
        SampleSet songset = new SampleSet();
        try
        {
            songset = LoadSampleSetFromMelody(files[j]);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
        // NOTE(review): addAll is invoked concurrently from parallel iterations;
        // confirm SampleSet.addAll is thread-safe or add synchronization.
        set.addAll(songset);
    });
    return set;
}
// Builds training samples from a composition: pairs each note of the main
// (first) track's melody with the same-position note of every other track that
// uses the configured instrument, skipping tracks that rest more than they play.
// NOTE(review): the duration-based alignment and note-hash filtering are
// commented out below, so notes are currently paired by index only — confirm
// this is intended.
public SampleSet LoadSampleSetFromComposition(Composition comp)
{
    SampleSet set = new SampleSet();
    if (comp.Tracks.Count < 2)
        return set;
    var mainSeq = comp.Tracks[0].GetMainSequence() as MelodySequence;
    var mainNotes = mainSeq.ToArray();

    for (int track = 1; track < comp.Tracks.Count; track++)
    {
        Dictionary<Note, int> frequencies = new Dictionary<Note, int>();

        if (comp.Tracks[track].Instrument != instrument)
        {
            // Console.WriteLine("\tSkipping instrument {0}", comp.Tracks[track].Instrument);
            continue;
        }
        var seq = comp.Tracks[track].GetMainSequence() as MelodySequence;
        var notes = seq.ToArray();
        var max = Math.Min(mainNotes.Length, notes.Length);

        // Skip tracks that are mostly silence.
        if(seq.TotalNoteDuration() < seq.TotalRestDuration())
        {
            continue;
        }
        Console.WriteLine("\tAdding instrument {0}", comp.Tracks[track].Instrument);

        // Count note occurrences (currently only feeds the commented-out filter).
        for (int j = 0; j < max; j++ )
        {
            if (!frequencies.ContainsKey(notes[j]))
                frequencies[notes[j]] = 1;
            else
                frequencies[notes[j]] += 1;
        }

        /*
        // Filtering
        for (int j = 0; j < max; j++)
        {
            double normalizedFrequency = frequencies[notes[j]] / (double)max;
            // if (normalizedFrequency < filterThreshold)
            //     continue;
            if (notes[j].Velocity > 0)
                notes[j].Velocity = 127;
            if (!noteHashes.ContainsKey(notes[j]))
                noteHashes[notes[j]] = hash++;
        }*/

        int mainTrackTime = 0;
        int accompTrackTime = 0;
        int incr = 0;
        for (int j = 0; j < max; j++)
        {
            // make sure to use closest note
            if (j + incr >= max)
                break;

            /*
            mainTrackTime += mainNotes[j].Duration;
            accompTrackTime += notes[j + incr].Duration;
            while(accompTrackTime < mainTrackTime)
            {
                incr++;
                if (j + incr + 1 > max)
                    break;
                accompTrackTime += notes[j + incr].Duration;
            }*/

            if (j + incr > max - 1)
                break;

            /*
            if (!noteHashes.ContainsKey(notes[j + incr]))
                continue;
            // caching notes
            if (noteHashes[notes[j + incr]] > MAX_OUTPUTS - 1)
                continue;
            */

            Sample s = GetSample(mainNotes[j],notes[j+incr]);
            set.add(s);
        }
    }
    return set;
}
/// <summary>
/// Merges the samples extracted from each composition into one combined set.
/// </summary>
/// <param name="compositions">Compositions to extract samples from.</param>
/// <returns>The combined sample set.</returns>
public SampleSet GenerateSamples(Composition[] compositions)
{
    var combined = new SampleSet();
    foreach (var composition in compositions)
    {
        combined.addAll(LoadSampleSetFromComposition(composition));
    }
    return combined;
}
// Generates an accompaniment melody for the input sequence: feeds each input
// note (capped at 200) through the network and maps the two network outputs to
// either a rest (res[0] <= 0.05) or a pitched note, standardizing durations.
// NOTE(review): reverseHashes / mainTime / accompTime only feed the commented-
// out alignment logic below.
public Note[] GenerateMelody(MelodySequence inputSeq)
{
    SampleSet set = new SampleSet();

    var mainNotes = inputSeq.ToArray();
    for (int j = 0; j < mainNotes.Length && j < 200; j++)
    {
        Sample s = GetSample(mainNotes[j],null);
        set.add(s);
    }

    List<Note> notes = new List<Note>();

    var reverseHashes = Utils.ReverseDictionary(noteHashes);

    int mainTime = 0;
    int accompTime = 0;

    foreach (Sample sample in set)
    {
        var res = ComputeNetworkOutput(sample);
        //var maxIndex = GetMaxIndex(res);

        Note outputNote = null;
        // res[0] encodes pitch (scaled onto 12 note names); res[1] encodes duration.
        if(res[0] <= 0.05)
            outputNote = new Note(-1, (int)(res[1] * 64.0),0);
        else
            outputNote = new Note((NoteNames)(res[0] * 12), 4, (Durations)(res[1] * 64.0));
        outputNote.StandardizeDuration();
        notes.Add(outputNote);

        /*
        if (reverseHashes.ContainsKey(maxIndex))
        {
            outputNote = reverseHashes[maxIndex];
            notes.Add(outputNote);
            /*accompTime += outputNote.Duration;
            if (mainTime > accompTime)
            {
                notes.Add(new Note(-1, mainTime - accompTime, 0));
                accompTime += mainTime - accompTime;
            }
            mainTime += (int)(sample.getInput()[1] * 64.0);*/
        //}
    }

    return notes.ToArray();
}
//!! Need to eliminate some high frequencies?
//!! Need to make real-time!
//!! Does not take into account the offset!
//!! Should allow cubic spline (matlab: spline() )
//!! Should start samples at the first uniform sample?
// Resamples irregularly-spaced input samples onto a uniform time grid starting
// at t = 0, using linear interpolation between consecutive input points.
// NOTE(review): `minTime` is computed but never used — confirm and remove.
public SampleSet<float> FilterData(Sample<float>[] input)
{
    if (AutoSampleRate)
    {
        // Find minimum time between samples
        samplesPerSecond = GetMinimumSamplesPerSecond(input);
    }
    TimeSpan minTime = TimeSpan.FromSeconds(1.0f / samplesPerSecond);

    // Get new sample times
    float[] newSampleTimesArray =
        Enumerable.Range(0, int.MaxValue).
        Select(x => (float)(x / samplesPerSecond)).
        TakeWhile(x => x <= input.Last().Time.TotalSeconds).
        ToArray();
    //float[] newSampleTimesArray =
    //    Enumerable.Range(0, int.MaxValue).
    //    Select(x => (float)(input.First().Time.TotalSeconds + (x / samplesPerSecond))).
    //    TakeWhile(x => x <= input.Last().Time.TotalSeconds).
    //    ToArray();

    // Create a list of new samples
    SampleSet<float> output = new SampleSet<float>(TimeSpan.Zero, samplesPerSecond, newSampleTimesArray.Length);
    int outputCount = 0;

    // Add first point
    output.Values[0] = input.First().Value;
    outputCount++;

    // Linearly interpolate points
    Sample<float> lastSample = new Sample<float>(TimeSpan.Zero, 0);
    foreach (Sample<float> sample in input)
    {
        // Line through the previous and current sample: value = m*(t - p) + b.
        double m = (sample.Value - lastSample.Value) / (sample.Time - lastSample.Time).TotalSeconds;
        double b = lastSample.Value;
        double p = lastSample.Time.TotalSeconds;
        while (outputCount < newSampleTimesArray.Length && newSampleTimesArray[outputCount] <= sample.Time.TotalSeconds)
        {
            float newTime = newSampleTimesArray[outputCount];
            // Interpolate value
            output.Values[outputCount] = (float)((newTime - p) * m + b);
            outputCount++;
        }
        lastSample = sample;
    }

    //Return filtered data
    return output;
}
/// <summary>
/// For every sample that has indexes on the given lane, selects that lane's
/// index set.
/// </summary>
public static SampleSet<HashSet<SampleIndex>> GetSampleIndexesForLane(SampleSet<SampleBcls> bcls, int lane)
{
    var samplesOnLane = bcls.WhereData(sampleBcls => sampleBcls.LaneIndexes.ContainsKey(lane));
    return samplesOnLane.SelectData(sampleBcls => sampleBcls.LaneIndexes[lane]);
}
// Token: 0x06002238 RID: 8760
// Decompiled abstract factory for a Class320 hit object taking a curve type,
// control-point list, per-node sound types and per-node sample-set lists;
// precise parameter semantics are not recoverable from this declaration —
// confirm at call sites.
internal abstract Class320 vmethod_1(Vector2 vector2_0, int int_0, bool bool_0, HitObjectSoundType hitObjectSoundType_0, CurveTypes curveTypes_0, int int_1, double double_0, List<Vector2> list_0, List<HitObjectSoundType> list_1, int int_2, SampleSet sampleSet_0, SampleSet sampleSet_1, List<SampleSet> list_2, List<SampleSet> list_3, Enum38 enum38_0, int int_3, string string_0);
// Resets the sound state to its defaults (custom index 2, Normal sound, Normal
// sample set) and clears the sample and file caches.
internal static void Clear()
{
    currentCustom = 2;
    currentSound = SoundType.Normal;
    currentSet = SampleSet.Normal;
    sampleDict.Clear();
    fileDict.Clear();
}
/// <summary>
/// Distinct lane numbers referenced by any sample's lane indexes.
/// </summary>
public static IEnumerable<int> GetLanesUsed(SampleSet<SampleBcls> bcls)
{
    var lanes = from sampleBcls in bcls.SampleData
                from lane in sampleBcls.LaneIndexes.Keys
                select lane;
    return lanes.Distinct();
}
// Wraps the given per-sample sequences of random variables as this fork's source.
public RandomFork(SampleSet<IEnumerable<IRandomVariable>> forks)
{
    _set = forks;
}
/// <summary>
/// Run-folder locations referenced by the samples' BCL data (distinct BCL run
/// folders, projected to their directories).
/// </summary>
public static IEnumerable<IDirectoryLocation> GetRunFolders(SampleSet<IEnumerable<SampleBcls>> samples)
    => GetBclRunFolders(samples).Select(bclRunFolder => bclRunFolder.RunFolder);
//custom>set>sound
// Advances to the next (sound, set, custom) combination: sounds cycle fastest
// (bit-shifting through the flag values up to Clap), then sample sets (up to
// Soft), then the custom index increments.
private static void update()
{
    currentSound = (SoundType)((int)currentSound << 1);
    if (currentSound > SoundType.Clap)
    {
        currentSound = SoundType.Normal;
        currentSet = (SampleSet)((int)currentSet << 1);
    }
    if (currentSet > SampleSet.Soft)
    {
        currentSet = SampleSet.Normal;
        currentCustom++;
    }
}
/// <summary>
/// Distinct BCL run folders referenced across all samples' BCL entries.
/// </summary>
public static List<BclRunFolder> GetBclRunFolders(SampleSet<IEnumerable<SampleBcls>> inputSamples)
{
    var runFolders = inputSamples.SampleData
        .SelectMany(sampleBclList => sampleBclList)
        .Select(sampleBcls => sampleBcls.BclRunFolder);
    return runFolders.Distinct().ToList();
}
/// <summary>
/// Loads sample plugins from the "../samples" directory, collects their samples
/// and the distinct set of categories, and appends an "All" category when any
/// samples were loaded.
/// </summary>
/// <returns>Always null; results accumulate in LoadedSamples / SampleCategories.</returns>
protected virtual Sample LoadSamples()
{
    string dir = "../samples";
    // NOTE(review): `samples` is never used; kept in case the SampleSet
    // constructor has side effects — confirm and remove.
    var samples = new SampleSet();
    PluginManager.Instance.LoadDirectory(dir);
    foreach (IPlugin plugin in PluginManager.Instance.InstalledPlugins)
    {
        // Consistency: pattern matching, matching the sibling LoadSamples overload.
        if (plugin is SamplePlugin pluginInstance)
        {
            this.LoadedSamplePlugins.Add(pluginInstance.Name);
            foreach (SdkSample sample in pluginInstance.Samples)
            {
                this.LoadedSamples.Add(sample);
            }
        }
    }
    // Build the distinct category list from the loaded samples.
    foreach (SdkSample sample in this.LoadedSamples)
    {
        if (!this.SampleCategories.Contains(sample.Metadata["Category"]))
        {
            this.SampleCategories.Add(sample.Metadata["Category"]);
        }
    }
    if (this.LoadedSamples.Count > 0)
    {
        this.SampleCategories.Add("All");
    }
    return null;
}
// For each sample, selects its SampleBcls entry belonging to the given run
// folder. Path comparison is case-sensitive on Mono and case-insensitive
// otherwise.
// NOTE(review): First() throws when a sample has no BCLs for this run folder —
// confirm callers guarantee membership.
public static SampleSet<SampleBcls> GetSampleBclsForRunFolder(SampleSet<IEnumerable<SampleBcls>> sampleBcls, BclRunFolder bclRunFolder)
{
    return sampleBcls
           .SelectData(bcls => bcls.First(
                           b => b.BclRunFolder.RunFolder.FullName.Equals(bclRunFolder.RunFolder.FullName,
                                                                         Utilities.IsThisMono() ? StringComparison.Ordinal : StringComparison.OrdinalIgnoreCase)));
}