/// <summary>
/// Determines whether the specified object is equivalent to this instance (all fields identical).
/// </summary>
/// <param name="obj">The object to compare.</param>
/// <returns><see langword="true"/>, if the object is a <see cref="PersonName"/> and ALL fields are identical; otherwise, <see langword="false"/>.</returns>
public override bool Equals(object obj)
{
    if (!(obj is PersonName pn))
    {
        return false;
    }

    // Same short-circuit order as a chain of early returns:
    // every component must report "no changes" against its counterpart.
    return !First.HasChanges(pn.First)
        && !Last.HasChanges(pn.Last)
        && !Middle.HasChanges(pn.Middle)
        && !Prefix.HasChanges(pn.Prefix)
        && !Suffix.HasChanges(pn.Suffix)
        && !Nickname.HasChanges(pn.Nickname);
}
/// <summary>
/// Converts one suffix dictionary line into a <see cref="Suffix"/> and registers it
/// both by its id and under each of its surface forms.
/// </summary>
/// <param name="entry">Raw dictionary line (id, lex, type, flags, rules, surfaces).</param>
/// <param name="suffixesById">Id-keyed index; this entry's id is added (throws on duplicates).</param>
/// <param name="suffixes">Surface-form lookup that receives one entry per surface.</param>
private void AddSuffix(SuffixDictionaryLine entry, Dictionary <string, Suffix> suffixesById, MorphemeSurfaceDictionary <Suffix> suffixes)
{
    string id = entry.Id;
    string lex = entry.Lex;
    MorphemeType morphemeType;
    // Unknown type strings fall back to MorphemeType.O and are reported on the console.
    if (!Enum.TryParse(entry.Type, out morphemeType))
    {
        morphemeType = MorphemeType.O;
        Console.WriteLine("Invalid Morpheme Type: " + entry.Type);
    }
    // Flags, rules and surfaces are comma- and/or space-separated lists.
    string[] flags = entry.Flags.Split(new[] { ',', ' ' }, StringSplitOptions.RemoveEmptyEntries);
    string[] rulesToken = entry.Rules.Split(new[] { ',', ' ' }, StringSplitOptions.RemoveEmptyEntries);
    Debug.Assert(entry.Surfaces != null, "entry.Surfaces != null");
    var surfaces = new List <string>(entry.Surfaces.Split(new[] { ',', ' ' }, StringSplitOptions.RemoveEmptyEntries));
    List <OrthographyRule> rules = _orthography.GetRules(rulesToken);
    var suffix = new Suffix(id, lex, morphemeType, LabelSet.ConvertLabelNamesToIndexes(flags), rules);
    suffixesById.Add(id, suffix);
    foreach (string surface in surfaces)
    {
        suffixes.Add(surface, suffix);
    }
}
/// <summary>Combines the Name and Suffix hashes (397 multiplier, overflow wraps).</summary>
public override int GetHashCode()
{
    unchecked
    {
        int nameHash = Name?.GetHashCode() ?? 0;
        int suffixHash = Suffix?.GetHashCode() ?? 0;
        return (nameHash * 397) ^ suffixHash;
    }
}
/// <summary>
/// Protobuf-generated hash: folds in the hash of whichever oneof match-pattern
/// member is set, plus the oneof case discriminator and any unknown fields.
/// </summary>
public override int GetHashCode()
{
    int hash = 1;
    // At most one of these branches applies — matchPatternCase_ records which
    // oneof member is populated.
    if (matchPatternCase_ == MatchPatternOneofCase.Exact) { hash ^= Exact.GetHashCode(); }
    if (matchPatternCase_ == MatchPatternOneofCase.Prefix) { hash ^= Prefix.GetHashCode(); }
    if (matchPatternCase_ == MatchPatternOneofCase.Suffix) { hash ^= Suffix.GetHashCode(); }
    if (matchPatternCase_ == MatchPatternOneofCase.Regex) { hash ^= Regex.GetHashCode(); }
    if (matchPatternCase_ == MatchPatternOneofCase.SafeRegex) { hash ^= SafeRegex.GetHashCode(); }
    // Distinguish messages whose set member hashes equal but whose case differs.
    hash ^= (int)matchPatternCase_;
    if (_unknownFields != null)
    {
        hash ^= _unknownFields.GetHashCode();
    }
    return (hash);
}
/// <summary>
/// Builds the six person/number forms for the requested conjugation, mood, tense and voice.
/// </summary>
/// <param name="conjugation">Conjugation number the suffix table is keyed on.</param>
/// <param name="mood">Grammatical mood (e.g. "Indicative").</param>
/// <param name="tense">Grammatical tense (e.g. "Perfect").</param>
/// <param name="isPassive">True for passive voice.</param>
/// <returns>A populated <see cref="Inflection"/>; forms stay null when no periphrastic row matches.</returns>
public Inflection CreateInflection(decimal conjugation, string mood, string tense, bool isPassive)
{
    Inflection inflection = new Inflection();

    // Perfect-system passives are periphrastic: an auxiliary form plus the
    // participle built on the supine stem (-us singular, -i plural).
    if ((tense == "Perfect" || tense == "Pluperfect" || tense == "Future Perfect") && isPassive)
    {
        // FirstOrDefault(predicate) instead of Where(...).FirstOrDefault().
        Passive passive = _passives.FirstOrDefault(p => p.Tense == tense && p.Mood == mood);
        if (passive != null)
        {
            inflection.singular_first = $"{passive.singular_first} {_supineStem}us";
            inflection.singular_second = $"{passive.singular_second} {_supineStem}us";
            inflection.singular_third = $"{passive.singular_third} {_supineStem}us";
            inflection.plural_first = $"{passive.plural_first} {_supineStem}i";
            inflection.plural_second = $"{passive.plural_second} {_supineStem}i";
            inflection.plural_third = $"{passive.plural_third} {_supineStem}i";
        }
    }
    else
    {
        Suffix suffix = _suffixes.FirstOrDefault(s => s.Conjugation == conjugation && s.Mood == mood && s.Passive == isPassive && s.Tense == tense);
        // NOTE(review): suffix is null when no row matches, which throws
        // NullReferenceException below — confirm the suffix table covers every
        // (conjugation, mood, tense, voice) combination callers can request.
        inflection.singular_first = SplitSuffix(suffix.singular_first);
        inflection.singular_second = SplitSuffix(suffix.singular_second);
        inflection.singular_third = SplitSuffix(suffix.singular_third);
        inflection.plural_first = SplitSuffix(suffix.plural_first);
        inflection.plural_second = SplitSuffix(suffix.plural_second);
        inflection.plural_third = SplitSuffix(suffix.plural_third);
    }
    return inflection;
}
/// <summary>
/// Picks a random suffix for the given prefix, weighted by each suffix's
/// multiplicity. Returns the placeholder suffix '*' when the prefix is unknown.
/// </summary>
/// <param name="prefix">Prefix whose character value keys the database lookup.</param>
public Suffix GetSuffixOf(Prefix prefix)
{
    string prefixValue = new string (prefix.value);
    int index = database.FindIndex(p => (p.prefix == prefixValue));
    if (index != -1)
    {
        var candidates = database[index].suffixes;
        // Sum(selector) instead of Select().ToArray().Sum() — no intermediate array.
        int multiplicitySum = candidates.Sum(m => m.multiplicity);
        int roll = Random.Range(0, multiplicitySum);
        // Weighted roll: each suffix occupies a slice proportional to its multiplicity.
        int cumulative = 0;
        foreach (Suffix suffix in candidates)
        {
            cumulative += suffix.multiplicity;
            if (roll < cumulative)
            {
                return suffix;
            }
        }
    }
    return new Suffix('*');
}
/// <summary>
/// Populates this overlay from a '|'-separated record.
/// Sample value : Uid|GridX|GridY|OffsetX|OffsetY|Prefix|Suffix
/// </summary>
/// <param name="str">Serialized record.</param>
/// <returns>true on success; false when the record is malformed.</returns>
public bool FromString(string str)
{
    try
    {
        var strList = str.Split('|');
        var i = 0;
        OverlayUid = strList[i++];
        // Grid coordinates are serialized at 4x resolution (integer division).
        GridX = int.Parse(strList[i++]) / 4;
        GridY = int.Parse(strList[i++]) / 4;
        OffsetX = int.Parse(strList[i++]);
        OffsetY = int.Parse(strList[i++]);
        Prefix = strList[i++];
        Suffix = strList[i++];
        // '^' is the serialized stand-in for a blank Prefix/Suffix value.
        Prefix = Prefix.Replace("^", "");
        Suffix = Suffix.Replace("^", "");
    }
    catch (Exception)
    {
        // Fixed: the caught exception was bound to an unused variable 'e'
        // (compiler warning). Malformed input is deliberately reported as false.
        return false;
    }
    return true;
}
/// <summary>
/// Updates a student's name, email and favorite course from the submitted DTO.
/// Returns 400 with the validation error when email or name creation fails.
/// </summary>
public IActionResult EditPersonalInfo(EditPersonalInfo dto)
{
    var student = _studentRepo.GetById(dto.StudentId);
    var favoriteCourse = Course.FromId(dto.FavoriteCourseId);
    var suffix = Suffix.FromId(dto.NameSuffixId);

    var emailResult = Email.Create(dto.Email);
    if (emailResult.IsFailure)
        return BadRequest(emailResult.Error);

    var nameResult = Name.Create(dto.FirstName, dto.LastName, suffix);
    if (nameResult.IsFailure)
        return BadRequest(nameResult.Error);

    student.EditPersonalInfo(nameResult.Value, emailResult.Value, favoriteCourse);
    _schoolContext.SaveChanges();

    return Ok();
}
/// <summary>
/// Runs a single basic-completion pass over the text control and captures the
/// resulting lookup items as a ProposalCollection (left empty when completion
/// produces no result).
/// </summary>
protected override void ExecuteCodeCompletion(Suffix suffix, ITextControl textControl, IntellisenseManager intellisenseManager, bool automatic, IContextBoundSettingsStore settingsStore)
{
    ResultProposalCollection = new ProposalCollection();
    var parameters = CodeCompletionParameters.CreateSingle(CodeCompletionType.BasicCompletion);
    parameters.EvaluationMode = EvaluationMode.LightAndFull;
    FilteredLookupItems filteredItems;
    var result = GetCompletionResult(
        textControl, intellisenseManager, parameters, LookupListSorting.ByRelevance,
        out filteredItems, settingsStore);
    if (result != null)
    {
        ResultProposalCollection = result.LookupItems.ToProposalCollection();
    }
}
// longest common prefix of s and t private static int lcpSuffix(Suffix s, Suffix t) { int n = Math.min(s.length(), t.length()); for (int i = 0; i < n; i++) { if (s.charAt(i) != t.charAt(i)) return i; } return n; }
/// <summary>
/// Registers a new student with a favorite course, grade, validated email and name.
/// Returns 400 with the validation error when email or name creation fails.
/// </summary>
public IActionResult RegisterStudent(RegisterStudent dto)
{
    var course = Course.FromId(dto.FavoriteCourseId);
    var suffix = Suffix.FromId(dto.NameSuffixId);

    // 'course' comes from outside the db context, so its state is detached.
    // Adding the student directly would mark the course as Added and make
    // SaveChanges fail because the course already exists.
    // Old workaround: _schoolContext.Entry(course).State == EntityState.Unchanged;

    var emailResult = Email.Create(dto.Email);
    if (emailResult.IsFailure)
        return BadRequest(emailResult.Error);

    var nameResult = Name.Create(dto.FirstName, dto.LastName, suffix);
    if (nameResult.IsFailure)
        return BadRequest(nameResult.Error);

    var student = new Student(nameResult.Value, emailResult.Value, course, dto.FavoriteCourseGrade);
    _studentRepo.Save(student);
    _schoolContext.SaveChanges();

    return Ok();
}
/// <summary>Looks up a suffix by word; returns whether it exists together with the item (null when absent).</summary>
public static (bool, Suffix) GetByName(string word)
{
    var item = GetItem(word);
    return (item != null, item);
}
// Claims the singleton slot for this object if it is still empty.
// NOTE(review): looks like a Unity-style singleton; if 'instance' is a
// UnityEngine.Object the '== null' comparison uses Unity's overloaded equality,
// so do not refactor this to '??=' without confirming the field's type.
void CreateInstance()
{
    if (instance == null)
    {
        instance = this;
    }
}
/// <summary>
/// Check Name equality, component by component: two components match when they
/// are the same reference, or when the left side is non-null and Equals the right.
/// </summary>
/// <param name="name">The name to compare against; null is never equal.</param>
/// <returns>true when every component matches.</returns>
public bool Equals(Name name) =>
    name != null
    // Fixed: the original prepended an unguarded 'Suffix.Equals(name.Suffix)'
    // before this null-safe check — redundant when Suffix is non-null, and a
    // NullReferenceException whenever Suffix is null.
    && (ReferenceEquals(this.Suffix, name.Suffix) || Suffix != null && Suffix.Equals(name.Suffix))
    && (ReferenceEquals(this.First, name.First) || First != null && First.Equals(name.First))
    && (ReferenceEquals(this.Middle, name.Middle) || Middle != null && Middle.Equals(name.Middle))
    && (ReferenceEquals(this.Last, name.Last) || Last != null && Last.Equals(name.Last))
    && (ReferenceEquals(this.Prefix, name.Prefix) || Prefix != null && Prefix.Equals(name.Prefix))
    && (ReferenceEquals(this.IsOrganization, name.IsOrganization) || IsOrganization.Equals(name.IsOrganization));
/**
 * Initializes a suffix array for the given {@code text} string.
 * Builds one {@code Suffix} view per starting index (no substring copies)
 * and sorts them lexicographically.
 * @param text the input string
 */
public SuffixArray(String text) {
    int n = text.length();
    this.suffixes = new Suffix[n];
    for (int i = 0; i < n; i++)
        suffixes[i] = new Suffix(text, i);
    // Relies on Suffix's natural ordering (it is sorted via Arrays.sort).
    Arrays.sort(suffixes);
}
/// <summary>
/// Renames a person: validates both name parts and the suffix id up front,
/// loads the person, applies the new PersonName, commits, and logs.
/// The Result pipeline short-circuits to OnFailure at the first failed step,
/// so .Value accesses below only run after their checks have passed.
/// </summary>
public async Task Rename(int personId, string firstName, string lastName, int suffixId)
{
    var firstNameResult = Name.Create(firstName);
    var lastNameResult = Name.Create(lastName);
    Maybe <Suffix> maybeSuffix = Suffix.FromId(suffixId);
    await Result
        // ToResult turns an empty Maybe into a failure, guarding maybeSuffix.Value below.
        .Combine(firstNameResult, lastNameResult, maybeSuffix.ToResult("Suffix is null."))
        .Bind(() => _repository
            .GetByIdAsync(personId)
            .ToResult($"Person was not found for ID: {personId}"))
        .Bind(person => PersonName
            .Create(firstNameResult.Value, lastNameResult.Value, maybeSuffix.Value)
            .Tap(personName => person.Rename(personName)))
        .Tap(() => _unitOfWork.CommitAsync())
        .Tap(() => _logger.LogInformation($"Person was renamed to {maybeSuffix.Value} {firstName} {lastName}"))
        .OnFailure(error => _logger.LogError(error));

    /*
     * var firstNameResult = Name.Create(firstName);
     * var lastNameResult = Name.Create(lastName);
     * var personNameResult = PersonName.Create(firstNameResult.Value, lastNameResult.Value);
     *
     * var maybePerson = await _repository.GetById(personId);
     * maybePerson.Value.Rename(personNameResult.Value);
     */
}
/// <summary>
/// Creates and stores a person from the given name parts, age and suffix id.
/// Validation failures short-circuit the pipeline to OnFailure.
/// </summary>
public async Task Create(string firstName, string lastName, int age, int suffixId)
{
    var firstNameResult = Name.Create(firstName);
    var lastNameResult = Name.Create(lastName);
    var ageResult = Age.Create(age);
    Maybe <Suffix> maybeSuffix = Suffix.FromId(suffixId);

    await Result
        // Fixed: include the suffix lookup in the combined validation (as Rename
        // does) so an unknown suffixId fails the pipeline instead of throwing
        // when maybeSuffix.Value is read below.
        .Combine(firstNameResult, lastNameResult, ageResult, maybeSuffix.ToResult("Suffix is null."))
        .Bind(() => PersonName.Create(firstNameResult.Value, lastNameResult.Value, maybeSuffix.Value))
        .Bind(personName => Person.Create(personName, ageResult.Value))
        .Tap(person => _repository.Save(person))
        .Tap(() => _unitOfWork.CommitAsync())
        .Tap(() => _logger.LogInformation($"Person was stored to db: {firstName} {lastName}"))
        .OnFailure(error => _logger.LogError(error));

    /*
     * var firstNameResult = Name.Create(firstName);
     * var lastNameResult = Name.Create(lastName);
     * var ageResult = Age.Create(age);
     * var personName = PersonName.Create(firstNameResult.Value, lastNameResult.Value);
     * var personResult = Person.Create(personName.Value, ageResult.Value);
     *
     * _repository.Save(personResult.Value);
     * await _unitOfWork.CommitAsync();
     */
}
//public string ToString(string format)
//{
//    Dictionary<string, string> replacements = new Dictionary<string, string>
//    {
//        { "Tt", Title?.CapitalizeAll() },
//        { "TT", Title?.ToUpper() },
//        { "tt", Title?.ToLower() },
//        { "Ff", Forename?.CapitalizeAll() },
//        { "FF", Forename?.ToUpper() },
//        { "ff", Forename?.ToLower() },
//        { "F", Forename?.FirstOrDefault().ToString().ToUpper() },
//        { "f", Forename?.FirstOrDefault().ToString().ToLower() },
//        { "Mm", string.Join(' ', MiddleNames.Select(m => m?.CapitalizeAll())) },
//        { "MM", string.Join(' ', MiddleNames.Select(m => m?.ToUpper())) },
//        { "mm", string.Join(' ', MiddleNames.Select(m => m?.ToLower())) },
//        { "M.", string.Join(' ', MiddleNames.Select(m => $"{m?.FirstOrDefault().ToString().ToUpper()}.")) },
//        { "m.", string.Join(' ', MiddleNames.Select(m => $"{m?.FirstOrDefault().ToString().ToLower()}.")) },
//        { "M", string.Join(' ', MiddleNames.Select(m => m?.FirstOrDefault().ToString().ToUpper())) },
//        { "m", string.Join(' ', MiddleNames.Select(m => m?.FirstOrDefault().ToString().ToLower())) },
//        { "Ll", Surname?.CapitalizeAll() },
//        { "LL", Surname?.ToUpper() },
//        { "ll", Surname?.ToLower() },
//        { "L", Surname?.FirstOrDefault().ToString().ToUpper() },
//        { "l", Surname?.FirstOrDefault().ToString().ToLower() },
//        { "Ss", (Suffix ?? "").IsRomanNumeral() ? Suffix?.ToUpper() : Suffix?.CapitalizeAll() },
//        { "SS", Suffix?.ToUpper() },
//        { "ss", Suffix?.ToLower() }
//    };
//    return format.ReplaceAll(replacements);
//}

/// <summary>
/// Formats the name according to the given <c>NameFormat</c>: each part is
/// rendered with its configured display type, the parts are joined in the
/// requested order (Western, WesternReversed, Eastern), and the suffix is
/// always appended last regardless of ordering.
/// </summary>
public string ToString(NameFormat format)
{
    string titleFormatted = Title?.ToString(format.TitleDisplayType) ?? string.Empty;
    // Forename plus middle names form the "given names" unit; Trim drops the
    // separator left over when either side is empty.
    string givenNamesFormatted = $"{Forename?.ToString(format.ForenameDisplayType)} {string.Join(' ', MiddleNames.Select(n => n?.ToString(format.MiddleNameDisplayType)))}".Trim();
    string surnameFormatted = Surname?.ToString(format.SurnameDisplayType) ?? string.Empty;
    string suffixFormatted = Suffix?.ToString(format.SuffixDisplayType) ?? string.Empty;
    StringBuilder fullName = new StringBuilder(titleFormatted);
    switch (format.NameOrder)
    {
        case NameOrder.Eastern:
            // Surname before given names, space separated.
            if (fullName.Length > 0 && !string.IsNullOrWhiteSpace(surnameFormatted)) { fullName.Append(' '); }
            fullName.Append(surnameFormatted);
            if (fullName.Length > 0 && !string.IsNullOrWhiteSpace(givenNamesFormatted)) { fullName.Append(' '); }
            fullName.Append(givenNamesFormatted);
            break;
        case NameOrder.WesternReversed:
            // "Surname, Given" ordering.
            if (fullName.Length > 0 && !string.IsNullOrWhiteSpace(surnameFormatted)) { fullName.Append(' '); }
            fullName.Append(surnameFormatted);
            if (fullName.Length > 0 && !string.IsNullOrWhiteSpace(givenNamesFormatted)) { fullName.Append(", "); }
            fullName.Append(givenNamesFormatted);
            break;
        case NameOrder.Western:
        default:
            // Given names before surname (default ordering).
            if (fullName.Length > 0 && !string.IsNullOrWhiteSpace(givenNamesFormatted)) { fullName.Append(' '); }
            fullName.Append(givenNamesFormatted);
            if (fullName.Length > 0 && !string.IsNullOrWhiteSpace(surnameFormatted)) { fullName.Append(' '); }
            fullName.Append(surnameFormatted);
            break;
    }
    // Suffix trails the assembled name in every ordering.
    if (fullName.Length > 0 && !string.IsNullOrWhiteSpace(suffixFormatted)) { fullName.Append(' '); }
    fullName.Append(suffixFormatted);
    return (fullName.ToString());
}
/// <summary>
/// Return the HashCode of this object.
/// </summary>
/// <returns>The HashCode of this object.</returns>
public override Int32 GetHashCode()
{
    unchecked
    {
        var providerHash = ProviderId.GetHashCode() * 17;
        return providerHash ^ Suffix.GetHashCode();
    }
}
// Micro-benchmark: concatenating two byte sequences via FastConcat (the
// "fixed pointer" path) vs. Buffer.BlockCopy, 10 million iterations each;
// elapsed times are written to the debug output.
// NOTE(review): the two paths mix setPageIndex_Prefix and setPageSize_Prefix —
// e.g. the BlockCopy destination offset uses setPageIndex_Prefix.Length while
// the buffer is sized from setPageSize_Prefix.Length. Confirm both prefixes
// have the same length, otherwise the comparison is not like-for-like.
private void FixedPointerVSBlockCopy()
{
    var sw2 = new Stopwatch();
    unsafe
    {
        byte[] buf2 = Array.Empty <byte>();
        sw2.Start();
        for (int i = 0; i < 10000000; i++)
        {
            buf2 = setPageIndex_Prefix.ToArray().FastConcat(0, setPageSize_Prefix.Length, Suffix.ToArray(), 0, Suffix.Length);
        }
        // Using the result prevents the loop from being optimized away.
        Debug.WriteLine(buf2.ToString());
        sw2.Stop();
    }
    var sw = new Stopwatch();
    sw.Start();
    byte[] buffer = new byte[0];
    for (int i = 0; i < 10000000; i++)
    {
        buffer = new byte[setPageSize_Prefix.Length + Suffix.Length];
        Buffer.BlockCopy(setPageSize_Prefix.ToArray(), 0, buffer, 0, setPageSize_Prefix.Length);
        Buffer.BlockCopy(Suffix.ToArray(), 0, buffer, setPageIndex_Prefix.Length, Suffix.Length);
    }
    Debug.WriteLine(buffer.ToString());
    sw.Stop();
    Debug.WriteLine("BlockCopy:" + sw.ElapsedMilliseconds);
    Debug.WriteLine("Fixed pointer:" + sw2.ElapsedMilliseconds);
}
/// <summary>
/// Protobuf-generated hash: combines the field path, whichever value oneof
/// member is set (exact/prefix/suffix), the oneof case discriminator, and any
/// unknown fields.
/// </summary>
public override int GetHashCode()
{
    int hash = 1;
    if (FieldPath.Length != 0) { hash ^= FieldPath.GetHashCode(); }
    // At most one of these branches applies — valueCase_ records which member is set.
    if (valueCase_ == ValueOneofCase.Exact) { hash ^= Exact.GetHashCode(); }
    if (valueCase_ == ValueOneofCase.Prefix) { hash ^= Prefix.GetHashCode(); }
    if (valueCase_ == ValueOneofCase.Suffix) { hash ^= Suffix.GetHashCode(); }
    hash ^= (int)valueCase_;
    if (_unknownFields != null)
    {
        hash ^= _unknownFields.GetHashCode();
    }
    return (hash);
}
/// <summary>
/// Sends the "view all" command (setPageSize prefix + suffix) over the current
/// connection and echoes the outgoing bytes to the output window; a write
/// failure tears the connection down.
/// </summary>
private void btn_viewAll_Click(object sender, RoutedEventArgs e)
{
    txtblk.Text += Seperator + "\n";
    var sw = new Stopwatch();
    sw.Start();
    var buffer = new byte[setPageSize_Prefix.Length + Suffix.Length];
    Buffer.BlockCopy(setPageSize_Prefix.ToArray(), 0, buffer, 0, setPageSize_Prefix.Length);
    // Fixed: the destination offset previously used setPageIndex_Prefix.Length,
    // which corrupts (or overruns) the buffer whenever the two prefixes differ
    // in length — the suffix must start right after the setPageSize prefix.
    Buffer.BlockCopy(Suffix.ToArray(), 0, buffer, setPageSize_Prefix.Length, Suffix.Length);
    try
    {
        Connection !.WriteBytes(buffer, 0, buffer.Length);
        if (_isVerbose)
        {
            OutputWindowVerbose(BitConverter.ToString(buffer), Source.Send);
        }
        else
        {
            OutputWindowSimple(buffer, Source.Send);
        }
        toggleCommandButtons(false);
    }
    catch
    {
        ShowErrorMessage(ConnectionLost, "");
        Connection?.Dispose();
    }
}
/// <summary>
/// Sends the "restart recording" command (prefix + payload + suffix) over the
/// current connection and echoes the outgoing bytes; a write failure tears the
/// connection down.
/// </summary>
private void btn_startRecording_Click(object sender, RoutedEventArgs e)
{
    txtblk.Text += Seperator + "\n";

    var prefix = setRestart_Prefix.ToArray();
    var payload = Restart_Record_Payload.ToArray();
    var suffix = Suffix.ToArray();

    // Assemble prefix | payload | suffix into a single message.
    var message = new byte[prefix.Length + payload.Length + suffix.Length];
    Buffer.BlockCopy(prefix, 0, message, 0, prefix.Length);
    Buffer.BlockCopy(payload, 0, message, prefix.Length, payload.Length);
    Buffer.BlockCopy(suffix, 0, message, prefix.Length + payload.Length, suffix.Length);

    try
    {
        Connection !.WriteBytes(message, 0, message.Length);
        if (_isVerbose)
        {
            OutputWindowVerbose(BitConverter.ToString(message), Source.Send);
        }
        else
        {
            OutputWindowSimple(message, Source.Send);
        }
        toggleCommandButtons(false);
    }
    catch
    {
        ShowErrorMessage(ConnectionLost, "");
        Connection?.Dispose();
    }
}
/// <summary>
/// Copies all matching criteria from a prototype pattern unit and precomputes
/// the derived lookup structures (hash sets and split arrays) used during matching.
/// </summary>
/// <param name="prototype">Prototype to copy; must be a <see cref="PatternUnitPrototype"/>.</param>
public PatternUnit(IPatternUnit prototype)
{
    var p = (PatternUnitPrototype)prototype;
    Mode = p.Mode;
    Optional = p.Optional;
    CaseSensitive = p.CaseSensitive;
    Type = p.Type;
    POS = p.POS;
    Suffix = p.Suffix;
    Prefix = p.Prefix;
    Shape = p.Shape;
    Token = p.Token;
    Set = p.Set;
    EntityType = p.EntityType;
    // Reuse the prototype's hashes when present; otherwise hash each set token,
    // choosing case-sensitive or case-insensitive hashing per the prototype.
    SetHashes = p.SetHashes ?? (p.Set is null ? null : new HashSet <ulong>(p.Set.Select(token => p.CaseSensitive ? PatternUnitPrototype.Hash64(token.AsSpan()) : PatternUnitPrototype.IgnoreCaseHash64(token.AsSpan()))));
    TokenHash = p.TokenHash;
    // Recursively wrap nested pattern sides when present ('X is object ? ... : null').
    LeftSide = p.LeftSide is object?new PatternUnit(p.LeftSide) : null;
    RightSide = p.RightSide is object?new PatternUnit(p.RightSide) : null;
    ValidChars = p.ValidChars;
    MinLength = p.MinLength;
    MaxLength = p.MaxLength;
    // Split multi-valued criteria once up front for faster per-token checks.
    _splitSuffix = Suffix?.Split(splitCharWithWhitespaces, StringSplitOptions.RemoveEmptyEntries)?.Distinct()?.ToArray();
    _splitPrefix = Prefix?.Split(splitCharWithWhitespaces, StringSplitOptions.RemoveEmptyEntries)?.Distinct()?.ToArray();
    _splitEntityType = EntityType is object?new HashSet <string>(EntityType.Split(splitChar, StringSplitOptions.RemoveEmptyEntries)) : null;
    _splitShape = Shape is object?new HashSet <string>(Shape.Split(splitCharWithWhitespaces, StringSplitOptions.RemoveEmptyEntries)) : null;
}
/// <summary>
/// Renders the subject as its base name, an optional base option number
/// (only for Ats/DoorL/DoorR), then an optional suffix with its own option number.
/// </summary>
public override string ToString()
{
    var result = Base.ToString();

    // Only these three bases carry a numeric option.
    bool baseTakesOption =
        Base == SubjectBase.Ats || Base == SubjectBase.DoorL || Base == SubjectBase.DoorR;
    if (baseTakesOption && BaseOption >= 0)
    {
        result += BaseOption;
    }

    if (Suffix != SubjectSuffix.None)
    {
        result += Suffix.ToString();
        if (SuffixOption >= 0)
        {
            result += SuffixOption;
        }
    }

    return result;
}
// True when any component of the name requires quoted-printable encoding.
internal bool NeedsToBeQpEncoded() =>
    LastName.Any(x => x.NeedsToBeQpEncoded())
    || FirstName.Any(x => x.NeedsToBeQpEncoded())
    || MiddleName.Any(x => x.NeedsToBeQpEncoded())
    || Prefix.Any(x => x.NeedsToBeQpEncoded())
    || Suffix.Any(x => x.NeedsToBeQpEncoded());
/// <summary>
/// Validates and builds a Name: both parts must be non-blank and, after
/// trimming, at most 200 characters.
/// </summary>
/// <returns>A success Result carrying the Name, or a failure with the validation error.</returns>
public static Result <Name> Create(string firstName, string lastName, Suffix suffix)
{
    if (string.IsNullOrWhiteSpace(firstName))
        return Result.Fail <Name>("First name should not be empty");
    if (string.IsNullOrWhiteSpace(lastName))
        return Result.Fail <Name>("Last name should not be empty");

    var first = firstName.Trim();
    var last = lastName.Trim();

    if (first.Length > 200)
        return Result.Fail <Name>("First name is too long");
    if (last.Length > 200)
        return Result.Fail <Name>("Last name is too long");

    return Result.Success(new Name(first, last, suffix));
}
/// <inheritdoc />
/// <summary>
/// Combines the hashes of all non-null properties with the 41/59 prime scheme.
/// </summary>
public override int GetHashCode()
{
    unchecked // Overflow is fine, just wrap
    {
        var hashCode = 41;
        if (Visible != null) { hashCode = hashCode * 59 + Visible.GetHashCode(); }
        if (XAnchor != null) { hashCode = hashCode * 59 + XAnchor.GetHashCode(); }
        if (Offset != null) { hashCode = hashCode * 59 + Offset.GetHashCode(); }
        if (Prefix != null) { hashCode = hashCode * 59 + Prefix.GetHashCode(); }
        if (Suffix != null) { hashCode = hashCode * 59 + Suffix.GetHashCode(); }
        if (Font != null) { hashCode = hashCode * 59 + Font.GetHashCode(); }
        return (hashCode);
    }
}
/// <summary>
/// Encodes a texture to the requested format and saves it via the byte-array
/// overload. Only jpg and png are supported for now.
/// </summary>
/// <param name="tex">Texture to encode.</param>
/// <param name="suffix">Target format (jpg or png).</param>
/// <param name="directoryPath">Destination directory.</param>
/// <param name="fileName">Destination file name.</param>
/// <returns>true on success; false when encoding fails or the format is unsupported.</returns>
public static bool SaveFile(Texture2D tex, Suffix suffix, string directoryPath, string fileName)
{
    byte[] bytes = null;
    try
    {
        switch (suffix) // only the two common formats are supported for now
        {
            case Suffix.jpg:
                bytes = tex.EncodeToJPG();
                break;
            case Suffix.png:
                bytes = tex.EncodeToPNG();
                break;
            default:
                break;
        }
    }
    catch (System.Exception)
    {
        Debug.LogErrorFormat("{0} file save failed", fileName);
        return(false);
    }
    // Fixed: an unsupported suffix previously fell through with bytes == null
    // and was handed to the overload anyway; fail explicitly instead.
    if (bytes == null)
    {
        Debug.LogErrorFormat("{0} file save failed: unsupported format {1}", fileName, suffix);
        return(false);
    }
    return(SaveFile(bytes, suffix, directoryPath, fileName));
}
/// <summary>
/// Deletes every file with the given suffix (extension) directly inside the
/// directory (no recursion).
/// </summary>
/// <returns>false when the directory does not exist or any deletion fails; true otherwise.</returns>
public static bool DeleteFiles(string directoryPath, Suffix suffix)
{
    if (!Directory.Exists(directoryPath))
    {
        return false;
    }

    var directory = new DirectoryInfo(directoryPath);
    var pattern = string.Format("*.{0}", suffix.ToString());
    var isSuccess = true;

    foreach (var file in directory.GetFiles(pattern, SearchOption.TopDirectoryOnly))
    {
        if (file == null)
        {
            continue;
        }
        try
        {
            file.Delete();
        }
        catch (Exception)
        {
            Debug.LogErrorFormat("Failed to delete file, the filePath is {0}", file.FullName);
            isSuccess = false;
        }
    }

    return isSuccess;
}
/// <summary>
/// Parses strings like:
/// SFX N 0 en [^ey]
/// </summary>
/// <param name="data">Tokenized affix line (exactly five tokens starting with "SFX").</param>
/// <returns>The parsed suffix, or null if parsing fails.</returns>
public static Suffix Parse(string[] data)
{
    if (data.Length != 5 || data[0] != "SFX")
    {
        return null;
    }

    var sfx = new Suffix();
    return sfx.ParseData(data) ? sfx : null;
}
/// <summary>
/// Post-completion hook: commits pending PSI documents, then applies
/// AcceptExpression to the first C# expression selected within the inserted range.
/// </summary>
protected override sealed void AfterCompletion(ITextControl textControl, ISolution solution, Suffix suffix, TextRange resultRange, string targetText, int caretOffset)
{
    // Ensure the PSI tree reflects the text just inserted by the completion.
    solution.GetPsiServices().CommitAllDocuments();
    var expressions = TextControlToPsi
        .GetSelectedElements<ICSharpExpression>(solution, textControl.Document, resultRange);
    foreach (var expression in expressions)
    {
        AcceptExpression(textControl, solution, resultRange, expression);
        break; // only the first matching expression is processed
    }
}
/// <summary>
/// After an item completes inside a markup extension, queues a follow-up manual
/// completion (under a read lock) so the next segment can be completed immediately.
/// </summary>
protected override void OnAfterComplete(ITextControl textControl, ref TextRange nameRange, ref TextRange decorationRange, TailType tailType, ref Suffix suffix, ref IRangeMarker caretPositionRangeMarker)
{
    // TODO: completion with a space can break this
    base.OnAfterComplete(textControl, ref nameRange, ref decorationRange, tailType, ref suffix, ref caretPositionRangeMarker);
    if (context != null)
    {
        context.CompletionManager.Locks.QueueReadLock("Code completion inside markup extension",
            () => ExecuteManualCompletion(textControl));
    }
}
/// <summary>
/// After an item completes inside a markup extension, queues (under a read lock)
/// an automatic manual-completion pass via the completion manager, without
/// auto-inserting a single match.
/// </summary>
protected override void OnAfterComplete(ITextControl textControl, ref TextRange nameRange, ref TextRange decorationRange, TailType tailType, ref Suffix suffix, ref IRangeMarker caretPositionRangeMarker)
{
    // TODO: completion with a space can break this
    base.OnAfterComplete(textControl, ref nameRange, ref decorationRange, tailType, ref suffix, ref caretPositionRangeMarker);
    if (context != null)
    {
        context.CompletionManager.Locks.QueueReadLock("Code completion inside markup extension",
            () => context.CompletionManager.ExecuteManualCompletion(
                CodeCompletionType.AutomaticCompletion, textControl, context.Solution, EmptyAction.Instance,
                context.CompletionManager.GetPrimaryEvaluationMode(CodeCompletionType.AutomaticCompletion),
                AutocompletionBehaviour.DoNotAutocomplete));
    }
}
/// <summary>
/// Post-completion hook: scans the inserted text for name placeholders. With no
/// placeholders it defers to the base behavior; otherwise it starts a live-template
/// hotspot session over all placeholder occurrences so the user can pick a name,
/// replaying the typed suffix once the session finishes.
/// </summary>
protected override void AfterCompletion(ITextControl textControl, ISolution solution, Suffix suffix, TextRange resultRange, string targetText, int caretOffset)
{
    // Collect the document ranges of every NamePlaceholder occurrence.
    var placeholders = new List<TextRange>();
    for (var index = 0;; index++)
    {
        index = targetText.IndexOf(NamePlaceholder, index, StringComparison.Ordinal);
        if (index == -1) break;
        var range = new TextRange(resultRange.StartOffset + index);
        placeholders.Add(range.ExtendRight(NamePlaceholder.Length));
    }
    if (placeholders.Count == 0)
    {
        base.AfterCompletion(textControl, solution, suffix, resultRange, targetText, caretOffset);
    }
    else
    {
        // One template field covers all occurrences, fed by the name suggestions.
        var nameField = new TemplateField("name", new NameSuggestionsExpression(myNames), 0);
        var hotspotInfo = new HotspotInfo(nameField, placeholders);
        var endRange = new TextRange(resultRange.StartOffset + caretOffset);
        var session = LiveTemplatesManager.Instance.CreateHotspotSessionAtopExistingText(
            mySolution, endRange, textControl, LiveTemplatesManager.EscapeAction.LeaveTextAndCaret, hotspotInfo);
        if (!suffix.IsEmpty)
        {
            // Replay the typed suffix only after the hotspot session completes.
            session.HotspotUpdated += delegate
            {
                if (session.IsFinished) suffix.Playback(textControl);
            };
        }
        session.Execute();
    }
}
/// <summary>
/// Inserts this lookup item: substitutes the source expression's text into the
/// replacement template ($EXPR$), positions the caret at $CARET$ (or at the end
/// when the marker is absent), and delegates to AfterCompletion with the final
/// textual range.
/// </summary>
public void Accept(ITextControl textControl, TextRange nameRange, LookupItemInsertType lookupItemInsertType, Suffix suffix, ISolution solution, bool keepCaretStill)
{
    // Bail out when the tracked ranges were invalidated by document edits.
    if (!myReplaceRange.IsValid || !myExpressionRange.IsValid) return;
    // Extend the replace range over the completed name when the two overlap.
    var replaceRange = myReplaceRange.Intersects(nameRange)
        ? new TextRange(myReplaceRange.StartOffset, nameRange.EndOffset)
        : myReplaceRange;
    var expressionText = textControl.Document.GetText(myExpressionRange);
    var targetText = myReplaceTemplate.Replace("$EXPR$", expressionText);
    // $CARET$ marks the desired caret position; default to the text end when absent.
    var caretOffset = targetText.IndexOf("$CARET$", StringComparison.Ordinal);
    if (caretOffset == -1) caretOffset = targetText.Length;
    else targetText = targetText.Replace("$CARET$", string.Empty);
    textControl.Document.ReplaceText(replaceRange, targetText);
    var range = TextRange.FromLength(replaceRange.StartOffset, targetText.Length);
    AfterCompletion(textControl, solution, suffix, range, targetText, caretOffset);
}
/// <summary>Delegates acceptance of this lookup item to the wrapped item.</summary>
public void Accept(ITextControl textControl, TextRange nameRange, LookupItemInsertType lookupItemInsertType, Suffix suffix, ISolution solution, bool keepCaretStill)
{
    Item.Accept(
        textControl, nameRange, lookupItemInsertType, suffix, solution, keepCaretStill);
}
/// <summary>Floating-point literal token: numeric value, type suffix, and raw source text.</summary>
public TokenFloat(Double value, Suffix suffix, String source)
    : base(TokenType.FLOAT)
{
    this.value = value;
    this.suffix = suffix;
    this.source = source;
}
// Builds a placeholder suffix whose value encodes the input's length as "_<len>_".
public static Suffix Parse(string input)
{
    int length = input.Length;
    return new Suffix
    {
        Value = string.Concat("_", length, "_"),
        TotalLength = length,
    };
}
/// <summary>Integer literal token: numeric value, type suffix, and raw source text.</summary>
public TokenInt(Int64 _val, Suffix _suffix, String _raw)
    : base(TokenType.INT)
{
    val = _val;
    suffix = _suffix;
    raw = _raw;
}
// When a suffix ends on an implicit node, adding a new character
// means I have to split an existing edge. This function is called
// to split an edge at the point defined by the Suffix argument.
// The existing edge loses its parent, as well as some of its leading
// characters. The newly created edge descends from the original
// parent, and now has the existing edge as a child.
//
// Since the existing edge is getting a new parent and starting
// character, its hash table entry will no longer be valid. That's
// why it gets removed at the start of the function. After the parent
// and start char have been recalculated, it is re-inserted.
// The number of characters stolen from the original node and given
// to the new node is equal to the number of characters in the suffix
// argument, which is last - first + 1;
public int SplitEdge(Suffix s)
{
    // Hash entry keys on (start node, first char), both of which change below.
    Remove();
    // New edge covers the first (last - first + 1) characters of this edge.
    Edge new_edge = new Edge(first_char_index, first_char_index + s.last_char_index - s.first_char_index, s.origin_node);
    new_edge.Insert();
    //SuffTree.FindNode(new_edge.end_node).suffix_node = s.origin_node;
    SuffTree.Nodes[new_edge.end_node].suffix_node = s.origin_node;
    // This edge keeps only the remaining characters and descends from the new node.
    first_char_index += s.last_char_index - s.first_char_index + 1;
    start_node = new_edge.end_node;
    Insert();
    return new_edge.end_node;
}
/// <summary>
/// Builds the suffix tree for T: sizes the node array and the edge hash table,
/// then extends the tree one prefix at a time from the active point
/// (Ukkonen-style construction via AddPrefix).
/// </summary>
/// <param name="T">Input text.</param>
/// <param name="minThreshold">Threshold stored for later tree queries.</param>
public void MakeTree(string T, double minThreshold)
{
    Console.WriteLine("Started: " + DateTime.Now.ToString());
    this.minThreshold = minThreshold;
    this.T = T;
    this.N = T.Length; // might be T.Length - 1;
    Node.Count = 1;
    // The text is shared statically with the Suffix and Edge helpers.
    Suffix.T = T;
    Edge.T = T;
    // A suffix tree over N characters has at most 2N nodes.
    Nodes = new Node[N * 2];
    // Edge hash table sized to the next prime above 2N plus ~10% headroom.
    int prime = (new Prime((int)((N * 2) + (N * 2 * 0.1)))).next();
    Edge.HASH_TABLE_SIZE = prime;
    Edge.Edges = new Edge[prime];
    InitializeNodesAndEdges();
    // The active point is the first non-leaf suffix in the
    // tree. We start by setting this to be the empty string
    // at node 0. The AddPrefix() function will update this
    // value after every new prefix is added.
    Suffix active = new Suffix(0, 0, -1); // The initial active prefix
    for (int i = 0; i < N; i++)
    {
        AddPrefix(active, i);
    }
    Console.WriteLine("Tree Done: " + DateTime.Now.ToString());
}
//
// This routine constitutes the heart of the algorithm.
// It is called repetitively, once for each of the prefixes
// of the input string. The prefix in question is denoted
// by the index of its last character.
//
// At each prefix, we start at the active point, and add
// a new edge denoting the new last character, until we
// reach a point where the new edge is not needed due to
// the presence of an existing edge starting with the new
// last character. This point is the end point.
//
// Luckily for use, the end point just happens to be the
// active point for the next pass through the tree. All
// we have to do is update it's last_char_index to indicate
// that it has grown by a single character, and then this
// routine can do all its work one more time.
//
public void AddPrefix(Suffix active, int last_char_index)
{
    int parent_node;
    int last_parent_node = -1;
    for (; ; )
    {
        Edge edge = new Edge();
        parent_node = active.origin_node;
        // Step 1 is to try and find a matching edge for the given node.
        // If a matching edge exists, we are done adding edges, so we break
        // out of this big loop.
        if (active.Explicit())
        {
            edge = Edge.Find(active.origin_node, T[last_char_index]);
            //if (edge != null) break;
            // A sentinel start_node of -1 marks "no edge found".
            if (edge.start_node != -1) break;
        }
        else
        {
            //implicit node, a little more complicated
            edge = Edge.Find(active.origin_node, T[active.first_char_index]);
            int span = active.last_char_index - active.first_char_index;
            // If the next character on the edge already matches, the end point is reached.
            if (T[edge.first_char_index + span + 1] == T[last_char_index]) break;
            parent_node = edge.SplitEdge(active);
        }
        // We didn't find a matching edge, so we create a new one, add
        // it to the tree at the parent node position, and insert it
        // into the hash table. When we create a new node, it also
        // means we need to create a suffix link to the new node from
        // the last node we visited.
        Edge new_edge = new Edge(last_char_index, N - 1, parent_node);
        new_edge.Insert();
        if (last_parent_node > 0)
        {
            //Node n = new Node();
            /****** new edition *******
            //n.idx = last_parent_node;
            //n.suffix_node = parent_node;
            */
            //Nodes.Add(n);
            Nodes[last_parent_node].suffix_node = parent_node;
        }
        last_parent_node = parent_node;
        // This final step is where we move to the next smaller suffix
        if (active.origin_node == 0)
            active.first_char_index++;
        else
        {
            //active.origin_node = FindNode(active.origin_node).suffix_node;
            // Follow the suffix link to the next smaller suffix's origin.
            active.origin_node = Nodes[active.origin_node].suffix_node;
        }
        active.Canonize();
    }
    // Close out the suffix link for the last internal node created.
    if (last_parent_node > 0)
    {
        //Node n = new Node();
        /******* New Edition ************
        n.idx = last_parent_node;
        n.suffix_node = parent_node;
        */
        //Nodes.Add(n);
        Nodes[last_parent_node].suffix_node = parent_node;
    }
    active.last_char_index++; //Now the endpoint is the next active point
    active.Canonize();
}
/// <summary>
/// Accepts a postfix-template lookup item: re-locates the target expression and
/// reference after completion (reparsing with a "__" placeholder when needed),
/// computes the textual range to remove, fixes expressions like "x &gt; 0.if"
/// back to "x &gt; 0", and expands the postfix template over the result.
/// </summary>
public void Accept(ITextControl textControl, TextRange nameRange, LookupItemInsertType insertType, Suffix suffix, ISolution solution, bool keepCaretStill)
{
    // find target expression after code completion
    var expressionRange = myExpressionRange.Range;
    if (myWasReparsed)
    {
        // Insert a dummy identifier so the document parses while we search.
        textControl.Document.ReplaceText(nameRange, "__");
        solution.GetPsiServices().CommitAllDocuments();
        nameRange = TextRange.FromLength(nameRange.StartOffset, 2);
    }
    var expression = (ICSharpExpression) FindMarkedNode(
        solution, textControl, expressionRange, nameRange, typeof(ICSharpExpression));
    if (expression == null)
    {
        // still can be parsed as IReferenceName
        var referenceName = (IReferenceName) FindMarkedNode(
            solution, textControl, expressionRange, nameRange, typeof(IReferenceName));
        if (referenceName == null) return;
        // reparse IReferenceName as ICSharpExpression
        var factory = CSharpElementFactory.GetInstance(referenceName.GetPsiModule(), false);
        expression = factory.CreateExpression(referenceName.GetText());
    }
    // take required component while tree is valid
    var psiModule = expression.GetPsiModule();
    var reference = FindMarkedNode(
        solution, textControl, myReferenceRange.Range, nameRange, myReferenceType);
    // Razor.{caret} case
    if (reference == expression && myWasReparsed)
    {
        var parentReference = reference.Parent as IReferenceExpression;
        if (parentReference != null && parentReference.NameIdentifier.Name == "__")
            reference = parentReference;
    }
    // calculate textual range to remove
    var replaceRange = CalculateRangeToRemove(nameRange, expression, reference);
    // fix "x > 0.if" to "x > 0"
    ICSharpExpression expressionCopy;
    if (reference != null && expression.Contains(reference))
    {
        expressionCopy = FixExpression(expression, reference);
    }
    else
    {
        // Work on a detached copy so the expansion cannot invalidate the tree.
        expressionCopy = expression.IsPhysical() ? expression.Copy(expression) : expression;
    }
    Assertion.Assert(!expressionCopy.IsPhysical(), "expressionCopy is physical");
    ExpandPostfix(
        textControl, suffix, solution, replaceRange, psiModule, expressionCopy);
}