/// <summary>
/// Yields each word of the split string, unchanged, as an index key.
/// </summary>
public override IEnumerable<string> GetKeys(SplitString s)
{
    foreach (string token in s.Split)
    {
        yield return token;
    }
}
/// <summary>
/// Filters the raw change results down to files whose extension has a known
/// deployment target directory, then builds and displays the proposed artifact.
/// Each result line is "&lt;change-type-char&gt;&lt;relative path&gt;".
/// </summary>
/// <param name="Results">Raw change entries; first character is the change type.</param>
/// <param name="WorkingDirectory">Root directory prepended to each relative path.</param>
protected void ProcessResults(Collection<PSObject> Results, string WorkingDirectory)
{
    // relative path -> single-character change type
    var changeTypeByFile = new Dictionary<string, string>();

    foreach (PSObject entry in Results)
    {
        string line = entry.ToString();
        string changeType = line.Substring(0, 1);
        string relativePath = line.Remove(0, 1).Trim();

        // Normalise to Windows separators before extracting the extension.
        string absolutePath = (WorkingDirectory + relativePath).Replace('/', '\\');
        SplitString extensionSplit = Spliter.Split(absolutePath, ".");

        if (Artifact.TARGET_DIRECTORIES_BY_EXTENSION.ContainsKey(extensionSplit.Right))
        {
            changeTypeByFile.Add(relativePath, changeType);
        }
        else
        {
            Console.WriteLine("File not added for deployment: " + line);
        }
    }

    Artifact proposed = new Artifact(WorkingDirectory, changeTypeByFile);
    proposed.Display();
}
/// <summary>
/// Yields each word of the split string with any leading '#' characters removed.
/// </summary>
public override IEnumerable<string> GetQueryKeys(SplitString s)
{
    foreach (string token in s.Split)
    {
        string withoutHash = token.TrimStart('#');
        yield return withoutHash;
    }
}
/// <summary>
/// Yields the upper-cased first character of each word as a query key.
/// </summary>
public override IEnumerable<string> GetQueryKeys(SplitString s)
{
    foreach (string word in s.Split)
    {
        // Use invariant casing so keys do not vary with the current thread
        // culture (CA1304, e.g. the Turkish dotless-i problem). The matching
        // GetKeys implementation must use the same casing rule.
        yield return word[0].ToString().ToUpperInvariant();
    }
}
/// <summary>
/// SpecFlow step: splits the string stored in the scenario context and stores
/// the resulting pair array back into the context.
/// </summary>
public void When進行字元切割()
{
    var input = ScenarioContext.Current.Get<string>();
    string[] pairs = SplitString.Solution(input);
    ScenarioContext.Current.Set(pairs);
}
/// <summary>
/// For each distinct teacher id, expands every distinct TimeAndArea string into
/// repeating (weekRange, week, time, area) quadruples and inserts one row per
/// quadruple into the TabTeacherCourseSimpleMap table.
/// </summary>
/// <param name="strDistinctTeacherID">Distinct teacher ids to process.</param>
private void InsertCoursesSimpleMap(List <string> strDistinctTeacherID)
{
    for (int i = 0; i < strDistinctTeacherID.Count; i++)
    {
        List <string> strDD = new List <string>();
        // Fetch the distinct TimeAndArea strings for this teacher.
        strDD = AddSQLStringToDAL.GetDistinctStrings("TabAllCourses", "TimeAndArea", "TeacherID", strDistinctTeacherID[i].ToString());
        for (int k = 0; k < strDD.Count; k++)
        {
            List <string> strResult = new List <string>();
            // strResult is a flat list laid out as repeating groups of four:
            // [weekRange, week, time, area, weekRange, week, time, area, ...]
            strResult = SplitString.GetSplitCountAndDetails(strDD[k]);
            // SECURITY NOTE(review): this SQL is built by string concatenation.
            // If teacher ids or TimeAndArea values can contain quotes this is an
            // injection/robustness risk - prefer a parameterized query in the DAL.
            DataTable dt = AddSQLStringToDAL.GetDataTableBysql("select * from TabAllCourses where TeacherID = '" + strDistinctTeacherID[i].ToString() + "' and TimeAndArea = '" + strDD[k].ToString() + "'");
            for (int j = 0; j < (strResult.Count / 4); j++)
            {
                // (original note, translated) If you want to separate with spaces,
                // just append one space at the end (',' was tried as the example).
                string WeekRange = SplitString.GetWithoutWeek(strResult[j * 4 + 0].ToString());
                string Week = strResult[j * 4 + 1].ToString();
                string Time = strResult[j * 4 + 2].ToString();
                string Area = strResult[j * 4 + 3].ToString();
                string Course = dt.Rows[0]["Course"].ToString().Trim();
                // Insert one mapping row; the success result is deliberately ignored.
                if (AddSQLStringToDAL.InsertTabTeachers("TabTeacherCourseSimpleMap", strDistinctTeacherID[i].ToString(), dt.Rows[0]["TeacherName"].ToString(), Course, WeekRange, Week, Time, strDD[k].ToString(), dt.Rows[0]["Class"].ToString(), dt.Rows.Count.ToString(), dt.Rows[0]["TeacherDepartment"].ToString(), dt.Rows[0]["StudentDepartment"].ToString(), Area))
                {
                }
            }
            dt.Clear();
        }
    }
}
public void ExtendedTest2()
{
    var result = SplitString.Solution("abcd");

    Assert.IsNotNull(result, "solution did not return a value");
    Assert.AreEqual(2, result.Length, "solution did not return an array with correct number of pairs");
    Assert.AreEqual("cd", result[1], "last pair in solution is not correct");
}
public void ExtendedTest1()
{
    var result = SplitString.Solution("cdabefg");

    Assert.IsNotNull(result, "solution did not return a value");
    Assert.AreEqual(4, result.Length, "solution did not return an array with enough pairs");
    Assert.AreEqual("cd", result[0], "solution did not return pairs with correct values");
    Assert.AreEqual("g_", result[3], "solution did not return pairs with correct values");
}
/// <summary>
/// Yields one index key per tag attached to the character: the upper-cased
/// first letter of each tag name.
/// </summary>
public override IEnumerable<string> GetKeys(SplitString s)
{
    Character c = (Character)s;
    List<Tag> tags = tagGroups.GetTags(c.CodepointHex);
    foreach (Tag tag in tags)
    {
        // Use invariant casing so index keys do not depend on the current
        // thread culture (CA1304); the matching GetQueryKeys implementation
        // must apply the same casing rule.
        yield return tag.TagName[0].ToString().ToUpperInvariant();
    }
}
/// <summary>
/// Updates an existing article: rewrites its content file on disk, updates the
/// database row, then refreshes the Redis cache entry. Steps run in that order;
/// a failure at any step aborts the remaining ones.
/// </summary>
/// <param name="updateArticleDto">The new article data, including the raw editor content.</param>
/// <returns>True when file, database and cache were all updated; otherwise false.</returns>
public async Task<bool> UpdateArticle(UpdateArticleDto updateArticleDto)
{
    int articleId = updateArticleDto.ArticleId;
    // Look up the article with this id in the database.
    var article = await _articleRepository.FirstOrDefaultAsync(a => a.Id == articleId);
    if (article == null)
    {
        // The article does not exist; nothing to update.
        _logger.LogError("文章不存在,更新失败");
        return(false);
    }
    // Full path of the article's existing content file.
    string completePath = article.ArticleUrl;
    // The new article content as received from the editor.
    string newArticleContent = updateArticleDto.ArticleContent;
    // Strip the leading marker string from the editor content.
    string articleContentNormal = SplitString.SplitStringWithStart(newArticleContent);
    // Write the new content over the existing file.
    var result = await FileOperate.UpdateArticleContentAsync(completePath, articleContentNormal, _logger);
    if (result == false)
    {
        // Updating the file on disk failed; leave database and cache untouched.
        _logger.LogError("从硬盘更新文章失败,更新失败");
        return(false);
    }
    // The file on disk was updated successfully - now update the database row.
    article.ArticleName = updateArticleDto.ArticleName;
    article.ArticleTags = updateArticleDto.ArticleTags;
    article.TypeId = updateArticleDto.TypeId;
    article.LastModificationTime = updateArticleDto.LastModificationTime;
    article.Introduce = updateArticleDto.Introduce;
    article.IsRecommend = updateArticleDto.IsRecommend;
    // Persist the changes.
    Article updateMysqlResult = await _articleRepository.UpdateAsync(article);
    if (updateMysqlResult == null)
    {
        // Database update failed; a retry mechanism could be added here.
        _logger.LogError("数据库更新失败,更新失败");
        return(false);
    }
    // Update succeeded - refresh the cache.
    // The Redis key is the article base key combined with the article id.
    string redisArticleKey = PostConsts.ArticleBaseKey + articleId;
    await _cacheManager.GetCache(PostConsts.RedisForArticleStore).SetAsync(redisArticleKey, updateMysqlResult);
    return(true);
}
/// <summary>
/// Publishes a new article: stores the content to a text file on disk, inserts
/// the article into the database, then caches it in Redis.
/// </summary>
/// <param name="newArticleDto">The article to publish.</param>
/// <returns>The new article id as a string on success; otherwise an error message.</returns>
public async Task<string> PostArticle([FromBody] NewArticleDto newArticleDto)
{
    // After receiving the article, first store its content to the matching
    // path on disk. The content starts with "test-editormd-markdown-doc=" and
    // is formatted as a JSON string; strip that leading marker here.
    string splitString = SplitString.SplitStringWithStart(newArticleDto.ArticleContent);
    if (splitString.IsNullOrWhiteSpace())
    {
        // The article content is empty.
        _logger.LogError("文章内容为空");
        return("文章内容为空");
    }
    var result = await FileOperate.StoreToTxtFileAsync(splitString, newArticleDto.ArticleName, _logger);
    if (result != "")
    {
        // A non-empty result means the store succeeded and holds the storage path.
        newArticleDto.ArticleUrl = result;
        // AutoMapper mapping from DTO to entity.
        Article newArticle = newArticleDto.MapTo <Article>();
        // Page views default to 1.
        newArticle.PageView = 1;
        // Store to the database.
        try
        {
            // Insert into the database and get the new article id back.
            var ArticleId = await _articleRepository.InsertAndGetIdAsync(newArticle);
            // The Redis key is the article base key combined with the article id.
            string redisArticleKey = PostConsts.ArticleBaseKey + ArticleId;
            // Store in the Redis cache.
            await _cacheManager.GetCache(PostConsts.RedisForArticleStore).SetAsync(redisArticleKey, newArticle);
            return(ArticleId.ToString());
        }
        catch (Exception ex)
        {
            _logger.LogError(exception: ex, "错误信息:" + ex.Message);
            return("发布文章出现异常");
        }
    }
    else
    {
        // Writing the article to disk failed.
        _logger.LogError("存储文章到硬盘时出现错误");
        return("发布文章失败");
    }
}
/// <summary>
/// Copies every changed file (plus its "-meta.xml" companion) into the target
/// deployment directory for its extension, collects "D" (delete) changes into
/// a destructive-changes set, then emits the package XML files.
/// </summary>
public void Display()
{
    // extension -> component names that were deleted (for destructive changes XML).
    Dictionary <string, List <string> > DestructiveChanges = new Dictionary <string, List <string> >();
    foreach (string File in FileToChangeType.Keys)
    {
        string fullPath = WorkingDirectory + File;
        // path.Right is the file extension; NOTE(review): Spliter.Split
        // semantics inferred from usage here - confirm against its definition.
        SplitString path = Spliter.Split(fullPath, ".");
        string splitLocation = placeToSplit[path.Right];
        SplitString copyPath = Spliter.Split(fullPath, splitLocation);
        string targetDirectoryForFile = WorkingDirectory + TARGET_DIRECTORIES_BY_EXTENSION[path.Right] + copyPath.Right;
        string metaFileSource = fullPath + "-meta.xml";
        string metaFileName = copyPath.Right + "-meta.xml";
        string targetDirectoryForMetaFile = WorkingDirectory + TARGET_DIRECTORIES_BY_EXTENSION[path.Right] + metaFileName;
        string changeType = FileToChangeType[File];
        if (changeType.Equals("D"))
        {
            // Group deletions by extension for CreateDestructiveChangesXML.
            SplitString extensionSplit = Spliter.Split(copyPath.Right, ".");
            if (!DestructiveChanges.ContainsKey(extensionSplit.Right))
            {
                DestructiveChanges.Add(extensionSplit.Right, new List <string>());
            }
            DestructiveChanges[extensionSplit.Right].Add(extensionSplit.Left);
        }
        Console.WriteLine("Adding " + copyPath.Right + " to deployment");
        try
        {
            // A missing source file is treated as a possible destructive change.
            // NOTE(review): File.Copy without an overwrite flag throws IOException
            // if the target already exists - confirm targets are always fresh.
            System.IO.File.Copy(fullPath, targetDirectoryForFile);
            System.IO.File.Copy(metaFileSource, targetDirectoryForMetaFile);
        }
        catch (FileNotFoundException ex)
        {
            Console.WriteLine("Possible destructive change detected!");
            Console.WriteLine(ex.FileName + " not added to artifact.");
        }
    }
    if (DestructiveChanges.Count != 0)
    {
        CreateDestructiveChangesXML(DestructiveChanges);
    }
    CopyPackageXML();
}
/// <summary>
/// Deletes one category by request.ID, or several when request.SplitArr holds a
/// comma-separated list of ids; returns a ResultEntity describing the outcome.
/// </summary>
/// <param name="request">The category (or id list) to delete.</param>
public override object OnDelete(Category request)
{
    // Start as a parameter error; overwritten on success or server error.
    ResultEntity <int> result = new ResultEntity <int> { Result = ResultStatic.ResultString.ERROR, ResultCode = ResultStatic.ResultCode.PARAMERROR };
    try
    {
        List <string> split = new List <string>();
        int affectrow = 0;
        dataCommand = DataCommandManager.GetDataCommand("DeleteCategory_Amber");
        if (request.SplitArr != null)
        {
            // Bulk delete: split the comma-separated id list and delete each id.
            // NOTE(review): affectrow keeps only the LAST delete's row count,
            // not the total - confirm that is intended.
            split = new SplitString().bySplit(request.SplitArr, ',');
            foreach (string i in split)
            {
                affectrow = dataCommand.ExecuteNonQuery(new[] { new{ ID = int.Parse(i) } });
            }
        }
        else
        {
            // Single delete by the request's own id.
            affectrow = dataCommand.ExecuteNonQuery(new[] { new{ ID = request.ID } });
        }
        result.Result = ResultStatic.ResultString.SUCCESS;
        result.ResultCode = ResultStatic.ResultCode.OK;
        result.ResultContent = affectrow;
        return(result);
    }
    catch (Exception ex)
    {
        // Any failure (bad id parse, DB error) is reported as a server error.
        result.Result = ResultStatic.ResultString.SERVERERROR;
        result.ResultCode = ResultStatic.ResultCode.SERVERERROR;
        result.Error = ex.Message;
        return(result);
        //throw;
    }
}
/// <summary>
/// Recursively scans every chunk of <paramref name="reflex"/> against the IFP
/// plugin layout in <paramref name="elements"/>, collecting nested reflexives,
/// idents and string ids into each chunk's ChunkResources.
/// </summary>
/// <param name="reflex">The reflexive to scan; one Chunk entry is added per chunk read.</param>
/// <param name="elements">The IFP plugin objects describing the fields of one chunk.</param>
/// <param name="meta">The meta being scanned; its reflexivecount is incremented per nested reflexive found.</param>
/// <param name="offset">The base stream offset of the chunk data being scanned.</param>
/// <param name="map">The map whose stream is read.</param>
/// <param name="TagIndex">The index of the tag that contains the data.</param>
/// <param name="magic">Value subtracted from raw pointers to translate them to file offsets.</param>
/// <remarks>NOTE(review): field semantics inferred from names - confirm against the map format docs.</remarks>
private void CycleElements(
    ref SplitReflexive reflex,
    object[] elements,
    ref Meta meta,
    int offset,
    Map map,
    int TagIndex,
    int magic)
{
    for (int x = 0; x < reflex.chunkcount; x++)
    {
        // Snapshot the raw bytes of chunk x into its own memory stream.
        SplitReflexive chunkreflexive = new SplitReflexive();
        map.BR.BaseStream.Position = reflex.realtranslation + (x * reflex.chunksize);
        chunkreflexive.MS = new MemoryStream(reflex.chunksize);
        chunkreflexive.MS.Write(map.BR.ReadBytes(reflex.chunksize), 0, reflex.chunksize);
        chunkreflexive.chunksize = reflex.chunksize;
        chunkreflexive.splitReflexiveType = SplitReflexive.SplitReflexiveType.Chunk;
        reflex.Chunks.Add(chunkreflexive);
        // Walk every plugin-declared field of this chunk.
        for (int xx = 0; xx < elements.Length; xx++)
        {
            IFPIO.BaseObject tempbase = (IFPIO.BaseObject)elements[xx];
            switch (tempbase.ObjectType)
            {
                case IFPIO.ObjectEnum.Struct:
                    // Nested reflexive: read its count/pointer, then recurse into it.
                    IFPIO.Reflexive tempreflex = (IFPIO.Reflexive)tempbase;
                    SplitReflexive r = new SplitReflexive();
                    map.BR.BaseStream.Position = offset + tempreflex.offset + (x * reflex.chunksize);
                    r.mapOffset = (int)map.BR.BaseStream.Position;
                    r.chunkcount = map.BR.ReadInt32();
                    if (r.chunkcount == 0) { continue; }
                    r.chunksize = tempreflex.chunkSize;
                    r.translation = map.BR.ReadInt32() - magic;
                    r.pointstoTagIndex = map.Functions.ForMeta.FindMetaByOffset(r.translation);
                    r.description = tempreflex.name;
                    // Skip pointers that resolve to no known meta.
                    if (r.pointstoTagIndex == -1) { continue; }
                    // r.parent = reflex.description;// parentname;
                    r.realtranslation = r.translation;
                    r.realTagIndex = r.pointstoTagIndex;
                    r.label = tempreflex.label;
                    r.pointstoTagIndex = meta.TagIndex;
                    r.pointstotagtype = meta.type;
                    r.pointstotagname = meta.name;
                    r.offset = tempreflex.offset;
                    r.intag = meta.TagIndex;
                    r.intagtype = meta.type;
                    r.intagname = meta.name;
                    r.translation -= map.MetaInfo.Offset[r.realTagIndex];
                    r.inchunknumber = x;
                    r.splitReflexiveType = SplitReflexive.SplitReflexiveType.Container;
                    r.Chunks = new List<SplitReflexive>();
                    CycleElements(
                        ref r, tempreflex.items, ref meta, r.realtranslation, map, r.pointstoTagIndex, magic);
                    reflex.Chunks[x].ChunkResources.Add(r);
                    meta.reflexivecount++;
                    break;
                case IFPIO.ObjectEnum.Ident:
                    // Tag reference: resolve the ident to a tag index/type/name.
                    IFPIO.Ident tempident = (IFPIO.Ident)tempbase;
                    SplitIdent i = new SplitIdent();
                    map.BR.BaseStream.Position = offset + tempident.offset + (x * reflex.chunksize);
                    i.mapOffset = (int)map.BR.BaseStream.Position;
                    i.ident = map.BR.ReadInt32();
                    if (i.ident != -1)
                    {
                        try
                        {
                            i.pointstoTagIndex = map.Functions.ForMeta.FindMetaByID(i.ident);
                            i.pointstotagtype = map.MetaInfo.TagType[i.pointstoTagIndex];
                            i.pointstotagname = map.FileNames.Name[i.pointstoTagIndex];
                        }
                        catch
                        {
                            // Unresolvable ident: skip this field.
                            continue;
                        }
                    }
                    else { continue; }
                    i.mapOffset = offset + tempident.offset + (x * reflex.chunksize);
                    i.offset = tempident.offset;
                    i.inchunknumber = x;
                    i.intag = TagIndex;
                    i.intagtype = map.MetaInfo.TagType[i.intag];
                    i.intagname = map.FileNames.Name[i.intag];
                    i.description = tempident.name;
                    reflex.Chunks[x].ChunkResources.Add(i);
                    break;
                case IFPIO.ObjectEnum.StringID:
                    // String id: validate the id against the map's string table.
                    IFPIO.SID tempstringid = (IFPIO.SID)tempbase;
                    SplitString si = new SplitString();
                    map.BR.BaseStream.Position = offset + tempstringid.offset + (x * reflex.chunksize);
                    si.mapOffset = (int)map.BR.BaseStream.Position;
                    si.id = map.BR.ReadUInt16();
                    if (si.id == 0 | si.id >= map.MapHeader.scriptReferenceCount) { continue; }
                    map.BR.ReadByte();
                    // The stored length byte must match the string table entry.
                    int temp = map.BR.ReadByte();
                    if (temp != map.Strings.Length[si.id]) { continue; }
                    si.mapOffset = offset + tempstringid.offset + (x * reflex.chunksize);
                    // NOTE(review): the next assignment immediately overwrites this
                    // computed offset - confirm which value is intended.
                    si.offset = si.mapOffset - map.MetaInfo.Offset[TagIndex];
                    si.offset = tempstringid.offset;
                    si.inchunknumber = x;
                    si.name = map.Strings.Name[si.id];
                    si.intag = TagIndex;
                    si.intagtype = map.MetaInfo.TagType[si.intag];
                    si.intagname = map.FileNames.Name[si.intag];
                    si.description = tempstringid.name;
                    reflex.Chunks[x].ChunkResources.Add(si);
                    break;
            }
        }
    }
    return;
}
/// <summary>
/// Uses the character's codepoint hex string as its sole index key.
/// </summary>
public override IEnumerable<string> GetKeys(SplitString s)
{
    var character = (Character)s;
    yield return character.CodepointHex;
}
/// <summary>
/// Yields the unsplit query string left-padded with '0' to at least four characters.
/// </summary>
public override IEnumerable<string> GetQueryKeys(SplitString s)
{
    string padded = s.Unsplit.PadLeft(4, '0');
    yield return padded;
}
public void BasicTest1()
{
    var expected = new[] { "ab", "c_" };

    Assert.AreEqual(expected, SplitString.Solution("abc"));
}
/// <summary>
/// Produces the index key(s) for the given split string; each concrete
/// subclass defines its own key scheme.
/// </summary>
/// <param name="s">The split string to derive keys from.</param>
/// <returns>Zero or more index keys.</returns>
public abstract IEnumerable <string> GetKeys(SplitString s);
/// <summary>
/// By default, query keys are identical to the index keys.
/// </summary>
public virtual IEnumerable<string> GetQueryKeys(SplitString s) => GetKeys(s);
public void BasicTest2()
{
    var expected = new[] { "ab", "cd", "ef" };

    Assert.AreEqual(expected, SplitString.Solution("abcdef"));
}
/// <summary>
/// Recursively scans every chunk of <paramref name="reflex"/> against the IFP
/// plugin layout in <paramref name="elements"/>, collecting nested reflexives,
/// idents and string ids into each chunk's ChunkResources.
/// </summary>
/// <param name="reflex">The reflexive to scan; one Chunk entry is added per chunk read.</param>
/// <param name="elements">The IFP plugin objects describing the fields of one chunk.</param>
/// <param name="meta">The meta being scanned; its reflexivecount is incremented per nested reflexive found.</param>
/// <param name="offset">The base stream offset of the chunk data being scanned.</param>
/// <param name="map">The map whose stream is read.</param>
/// <param name="TagIndex">The index of the tag that contains the data.</param>
/// <param name="magic">Value subtracted from raw pointers to translate them to file offsets.</param>
/// <remarks>NOTE(review): field semantics inferred from names - confirm against the map format docs.</remarks>
private void CycleElements(
    ref SplitReflexive reflex,
    object[] elements,
    ref Meta meta,
    int offset,
    Map map,
    int TagIndex,
    int magic)
{
    for (int x = 0; x < reflex.chunkcount; x++)
    {
        // Snapshot the raw bytes of chunk x into its own memory stream.
        SplitReflexive chunkreflexive = new SplitReflexive();
        map.BR.BaseStream.Position = reflex.realtranslation + (x * reflex.chunksize);
        chunkreflexive.MS = new MemoryStream(reflex.chunksize);
        chunkreflexive.MS.Write(map.BR.ReadBytes(reflex.chunksize), 0, reflex.chunksize);
        chunkreflexive.chunksize = reflex.chunksize;
        chunkreflexive.splitReflexiveType = SplitReflexive.SplitReflexiveType.Chunk;
        reflex.Chunks.Add(chunkreflexive);
        // Walk every plugin-declared field of this chunk.
        for (int xx = 0; xx < elements.Length; xx++)
        {
            IFPIO.BaseObject tempbase = (IFPIO.BaseObject)elements[xx];
            switch (tempbase.ObjectType)
            {
                case IFPIO.ObjectEnum.Struct:
                    // Nested reflexive: read its count/pointer, then recurse into it.
                    IFPIO.Reflexive tempreflex = (IFPIO.Reflexive)tempbase;
                    SplitReflexive r = new SplitReflexive();
                    map.BR.BaseStream.Position = offset + tempreflex.offset + (x * reflex.chunksize);
                    r.mapOffset = (int)map.BR.BaseStream.Position;
                    r.chunkcount = map.BR.ReadInt32();
                    if (r.chunkcount == 0) { continue; }
                    r.chunksize = tempreflex.chunkSize;
                    r.translation = map.BR.ReadInt32() - magic;
                    r.pointstoTagIndex = map.Functions.ForMeta.FindMetaByOffset(r.translation);
                    r.description = tempreflex.name;
                    // Skip pointers that resolve to no known meta.
                    if (r.pointstoTagIndex == -1) { continue; }
                    // r.parent = reflex.description;// parentname;
                    r.realtranslation = r.translation;
                    r.realTagIndex = r.pointstoTagIndex;
                    r.label = tempreflex.label;
                    r.pointstoTagIndex = meta.TagIndex;
                    r.pointstotagtype = meta.type;
                    r.pointstotagname = meta.name;
                    r.offset = tempreflex.offset;
                    r.intag = meta.TagIndex;
                    r.intagtype = meta.type;
                    r.intagname = meta.name;
                    r.translation -= map.MetaInfo.Offset[r.realTagIndex];
                    r.inchunknumber = x;
                    r.splitReflexiveType = SplitReflexive.SplitReflexiveType.Container;
                    r.Chunks = new List <SplitReflexive>();
                    CycleElements(
                        ref r, tempreflex.items, ref meta, r.realtranslation, map, r.pointstoTagIndex, magic);
                    reflex.Chunks[x].ChunkResources.Add(r);
                    meta.reflexivecount++;
                    break;
                case IFPIO.ObjectEnum.Ident:
                    // Tag reference: resolve the ident to a tag index/type/name.
                    IFPIO.Ident tempident = (IFPIO.Ident)tempbase;
                    SplitIdent i = new SplitIdent();
                    map.BR.BaseStream.Position = offset + tempident.offset + (x * reflex.chunksize);
                    i.mapOffset = (int)map.BR.BaseStream.Position;
                    i.ident = map.BR.ReadInt32();
                    if (i.ident != -1)
                    {
                        try
                        {
                            i.pointstoTagIndex = map.Functions.ForMeta.FindMetaByID(i.ident);
                            i.pointstotagtype = map.MetaInfo.TagType[i.pointstoTagIndex];
                            i.pointstotagname = map.FileNames.Name[i.pointstoTagIndex];
                        }
                        catch
                        {
                            // Unresolvable ident: skip this field.
                            continue;
                        }
                    }
                    else { continue; }
                    i.mapOffset = offset + tempident.offset + (x * reflex.chunksize);
                    i.offset = tempident.offset;
                    i.inchunknumber = x;
                    i.intag = TagIndex;
                    i.intagtype = map.MetaInfo.TagType[i.intag];
                    i.intagname = map.FileNames.Name[i.intag];
                    i.description = tempident.name;
                    reflex.Chunks[x].ChunkResources.Add(i);
                    break;
                case IFPIO.ObjectEnum.StringID:
                    // String id: validate the id against the map's string table.
                    IFPIO.SID tempstringid = (IFPIO.SID)tempbase;
                    SplitString si = new SplitString();
                    map.BR.BaseStream.Position = offset + tempstringid.offset + (x * reflex.chunksize);
                    si.mapOffset = (int)map.BR.BaseStream.Position;
                    si.id = map.BR.ReadUInt16();
                    if (si.id == 0 | si.id >= map.MapHeader.scriptReferenceCount) { continue; }
                    map.BR.ReadByte();
                    // The stored length byte must match the string table entry.
                    int temp = map.BR.ReadByte();
                    if (temp != map.Strings.Length[si.id]) { continue; }
                    si.mapOffset = offset + tempstringid.offset + (x * reflex.chunksize);
                    // NOTE(review): the next assignment immediately overwrites this
                    // computed offset - confirm which value is intended.
                    si.offset = si.mapOffset - map.MetaInfo.Offset[TagIndex];
                    si.offset = tempstringid.offset;
                    si.inchunknumber = x;
                    si.name = map.Strings.Name[si.id];
                    si.intag = TagIndex;
                    si.intagtype = map.MetaInfo.TagType[si.intag];
                    si.intagname = map.FileNames.Name[si.intag];
                    si.description = tempstringid.name;
                    reflex.Chunks[x].ChunkResources.Add(si);
                    break;
            }
        }
    }
    return;
}
public void return_same_value_when_input_has_only_two_characters()
{
    var expected = new string[] { "ab" };

    Assert.AreEqual(expected, SplitString.Solution("ab"));
}
/// <summary>
/// Uses only the first word of the split string as the index key.
/// </summary>
public override IEnumerable<string> GetKeys(SplitString s)
{
    string firstWord = s.Split[0];
    yield return firstWord;
}
public void split_multiple_pairs()
{
    var expected = new[] { "ab", "cd" };

    Assert.AreEqual(expected, SplitString.Solution("abcd"));
}
public void AcceptanceTests()
{
    var oddCase = new string[] { "ab", "c_" };
    var evenCase = new string[] { "ab", "cd", "ef" };

    Assert.AreEqual(oddCase, SplitString.Solution("abc"));
    Assert.AreEqual(evenCase, SplitString.Solution("abcdef"));
}
/// <summary>
/// Uses the first character of the unsplit string as the index key.
/// </summary>
public override IEnumerable<string> GetKeys(SplitString s)
{
    char first = s.Unsplit[0];
    yield return first.ToString();
}
public void add_underscore_when_odd_input()
{
    var expected = new[] { "a_" };

    Assert.AreEqual(expected, SplitString.Solution("a"));
}