// Queues a "_dir_recon_" directory-scan job for every candidate directory
// under the tree node located at (MouseX, MouseY).
// Fix: the original reused ONE jobQ instance for every JOBQ.Add; if jobQ is a
// reference type, all queued entries alias the same object and end up with the
// last location only. A fresh instance per entry is safe either way.
private void AddDirectoryForDirectoryscan(object sender, System.EventArgs e)
{
    try
    {
        TreeNode current = treeRecon.GetNodeAt(MouseX, MouseY);
        string FP = current.FullPath;

        // Tree path layout: first '\'-separated segment is the host, the rest
        // is rebuilt into a '/'-separated URL path.
        string[] FP_parts = FP.Split('\\');
        string targethost = FP_parts[0];
        string temp = "";
        for (int t = 1; t < FP_parts.Length; t++)
        {
            temp += FP_parts[t] + "/";
        }
        temp = temp.TrimEnd('/');
        string templocation = ("/" + temp).Replace("//", "/");

        TreeTagType rectag = (TreeTagType)current.Tag;

        // Build the directory word list; smart scan also mines directory names
        // already discovered (kn_dirs entries look like "host:/dir/...").
        ArrayList totestfor = new ArrayList();
        totestfor.AddRange(txtWiktoTestDirs.Lines);
        if (chkSmartDirScan.Checked)
        {
            foreach (string item in kn_dirs)
            {
                string[] partsd = item.Split(':');
                string[] dirparts = partsd[1].Split('/');
                foreach (string dirpart in dirparts)
                {
                    if (totestfor.Contains(dirpart) == false && dirpart.Length > 0)
                    {
                        totestfor.Add(dirpart);
                    }
                }
            }
        }

        string cleanheader = clean_partial_header(rectag.header);
        foreach (string dir in totestfor)
        {
            // One jobQ instance per queue entry (see fix note above).
            jobQ singlejob = new jobQ();
            singlejob.targethost = targethost;
            singlejob.isSSL = rectag.isSSL;
            singlejob.targetport = rectag.port;
            singlejob.ext = "-NONE-";
            singlejob.jobtype = "_dir_recon_";
            singlejob.header = cleanheader;
            singlejob.location = (templocation + "/" + dir).Replace("//", "/");
            JOBQ.Add(singlejob);
        }
    }
    catch
    {
        // Best-effort: GetNodeAt can return null and the Tag cast can fail when
        // the click misses a node; the original deliberately swallows these.
    }
}
// Manual/debug trigger: pops the head of the job queue and dispatches it.
// Fix: the original indexed JOBQ[0] unconditionally, throwing
// ArgumentOutOfRangeException when the queue was empty; it also allocated a
// jobQ that was immediately discarded by the cast assignment.
private void button1_Click(object sender, System.EventArgs e)
{
    if (JOBQ.Count == 0)
    {
        return; // nothing queued
    }
    jobQ nowjob = (jobQ)JOBQ[0];
    JobThread(nowjob);
    JOBQ.Remove(nowjob);
}
// Executes a single queued job; runs on a thread-pool thread via
// DelegateJob.BeginInvoke (see JobThread).
// NOTE(review): statusBar1.Text is written from this worker thread without
// Control.Invoke - cross-thread UI access; confirm this is acceptable for the
// target framework version.
// Fix: the "_check_index_" branch added to discovered_goods WITHOUT holding
// the discovered_goods lock, unlike the other two branches - it now locks too.
private void DoOneJob(jobQ thejob)
{
    try
    {
        #region JOBRECON_DIR
        if (thejob.jobtype.Equals("_dir_recon_"))
        {
            statusBar1.Text = "Testing directory " + thejob.location + " on host " + thejob.targethost + " SSL is " + thejob.isSSL.ToString();
            bool istrue = testRequest(thejob.targethost, thejob.targetport, buildRequest(thejob.location, "", "", thejob.header), (int)updownTimeOut.Value, false, thejob.isSSL, thejob.header, Convert.ToDouble(NUPDOWNBackEnd.Value));
            if (istrue == true)
            {
                // Feed the hit back for recursive testing.
                detailedRequest feedback = new detailedRequest();
                feedback.port = thejob.targetport;
                feedback.host = thejob.targethost;
                feedback.isSSL = thejob.isSSL;
                feedback.header = thejob.header;
                feedback.URL = thejob.location + "/";
                feedback.filename = "";
                feedback.filetype = "";
                do_Recon(feedback);
                // Record the discovery (mode 2 = directory hit).
                string proto;
                if (thejob.isSSL) { proto = "https://"; } else { proto = "http://"; }
                discovered newdis = new discovered();
                newdis.host = thejob.targethost;
                newdis.isSSL = thejob.isSSL;
                newdis.port = thejob.targetport;
                newdis.URL = thejob.location;
                newdis.protocol = proto;
                newdis.mode = 2;
                newdis.header = thejob.header;
                lock (discovered_goods)
                {
                    discovered_goods.Add(newdis);
                }
            }
            // NOTE(review): lock(this) is a known anti-pattern, but it matches
            // the file's existing convention for guarding jobstodo.
            lock (this)
            {
                jobstodo--;
            }
        }
        #endregion
        #region JOB_CHECK_INDEX
        if (thejob.jobtype.Equals("_check_index_"))
        {
            statusBar1.Text = "Checking indexability for " + thejob.location + " on " + thejob.targethost + " SSL is " + thejob.isSSL.ToString();
            bool isindex = checkIndexability(thejob);
            if (isindex)
            {
                // Record the discovery (mode 3 = indexable directory).
                string proto;
                if (thejob.isSSL) { proto = "https://"; } else { proto = "http://"; }
                discovered newdis = new discovered();
                newdis.host = thejob.targethost;
                newdis.isSSL = thejob.isSSL;
                newdis.port = thejob.targetport;
                newdis.URL = thejob.location + "[indexable]";
                newdis.protocol = proto;
                newdis.mode = 3;
                newdis.header = thejob.header;
                lock (discovered_goods) // fix: this branch previously added without the lock
                {
                    discovered_goods.Add(newdis);
                }
            }
            lock (this)
            {
                jobstodo--;
            }
        }
        #endregion
        #region JOBRECON_FILE
        if (thejob.jobtype.Equals("__file_recon__"))
        {
            string displaytext = "Testing file /" + thejob.location + "/" + thejob.filename + "." + thejob.ext + " on host " + thejob.targethost + " SSL is " + thejob.isSSL.ToString();
            statusBar1.Text = displaytext.Replace("//", "/").Replace("///", "/");
            bool istrue = testRequest(thejob.targethost, thejob.targetport, buildRequest(thejob.location, thejob.filename, thejob.ext, thejob.header), (int)updownTimeOut.Value, true, thejob.isSSL, thejob.header, Convert.ToDouble(NUPDOWNBackEnd.Value));
            if (istrue == true)
            {
                // Record the discovery (mode 4 = file hit).
                string proto;
                if (thejob.isSSL) { proto = "https://"; } else { proto = "http://"; }
                discovered newdis = new discovered();
                newdis.host = thejob.targethost;
                newdis.isSSL = thejob.isSSL;
                newdis.port = thejob.targetport;
                newdis.URL = thejob.location + "/" + thejob.filename + "." + thejob.ext;
                newdis.protocol = proto;
                newdis.mode = 4;
                newdis.header = thejob.header;
                lock (discovered_goods)
                {
                    discovered_goods.Add(newdis);
                }
            }
            lock (this)
            {
                jobstodo--;
            }
        }
        #endregion
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString());
    }
}
// Issues a raw HTTP/1.0 GET for the job's location and heuristically decides
// whether the server returned a directory listing.
// Fixes: removed the unreachable trailing "return false;" (both if/else arms
// returned), collapsed the if/else returns into one boolean expression, and
// built the request string once instead of duplicating it in both branches.
public bool checkIndexability(jobQ thejob)
{
    string request = "GET " + thejob.location + " HTTP/1.0\r\n" + thejob.header + "\r\n";
    string response;
    if (thejob.isSSL)
    {
        response = sendraw(thejob.targethost, thejob.targetport, request, 4096, (int)updownTimeOut.Value, 3, true);
    }
    else
    {
        response = sendraw(thejob.targethost, thejob.targetport, request, 4096, (int)updownTimeOut.Value);
    }
    // "ndex of" matches both "Index of" and "index of" listings; the second
    // marker matches IIS-style directory listings.
    return response.IndexOf("ndex of") >= 0 || response.IndexOf("To Parent Directory") >= 0;
}
// Dispatches one job asynchronously on the thread pool; EndJob is invoked as
// the completion callback when DoOneJob finishes.
public void JobThread(jobQ thejob)
{
    DelegateJob worker = new DelegateJob(this.DoOneJob);
    worker.BeginInvoke(thejob, new AsyncCallback(this.EndJob), null);
}
//we do work here...
// Timer pump: keeps at most ~7 jobs in flight, dispatching the head of JOBQ.
// Fix: the original did lock((object)jobstodo) - boxing an int creates a NEW
// object on every evaluation, so that lock never provided any mutual
// exclusion at all. jobstodo is now guarded by lock(this), matching the
// convention DoOneJob already uses when decrementing it.
private void timer2_Tick(object sender, System.EventArgs e)
{
    // Display the in-flight count (clamp negative values to "0").
    if (jobstodo >= 0)
    {
        lblCurrentJobQ.Text = jobstodo.ToString();
    }
    else
    {
        lblCurrentJobQ.Text = "0";
    }
    lock (this)
    {
        if (jobstodo > 6)
        {
            return; // enough jobs already in flight
        }
    }
    lblJobQLength.Text = JOBQ.Count.ToString();
    if (JOBQ.Count > 0)
    {
        lock (this)
        {
            jobstodo++;
        }
        jobQ nowjob = (jobQ)JOBQ[0];
        JobThread(nowjob);
        //ok here the job is done so take it off the queue
        JOBQ.Remove(nowjob);
    }
}
//this is for adding a file finding job.
// Checking a node queues a "__file_recon__" job for every filename x extension
// combination; unchecking removes that node's jobs from the queue.
// Fix: the original reused ONE jobQ instance for every queued combination; if
// jobQ is a reference type, every queue entry would alias the same object and
// share the last filename/ext. A fresh instance per combination is safe
// whether jobQ is a class or a struct.
private void treeRecon_AfterCheck(object sender, System.Windows.Forms.TreeViewEventArgs e)
{
    TreeNode current = e.Node;
    string FP = current.FullPath;
    // First '\'-separated segment is the host; the rest becomes the URL path.
    string[] FP_parts = FP.Split('\\');
    string targethost = FP_parts[0];
    string temp = "";
    for (int t = 1; t < FP_parts.Length; t++)
    {
        temp += FP_parts[t] + "/";
    }
    temp = temp.TrimEnd('/');
    string location = ("/" + temp).Replace("//", "/");
    TreeTagType rectag = (TreeTagType)current.Tag;
    string cleanheader = clean_partial_header(rectag.header);
    if (current.Checked)
    {
        foreach (string filetype in txtWiktoTestTypes.Lines)
        {
            foreach (string filename in txtWiktoTestFilenames.Lines)
            {
                // One jobQ instance per queue entry (see fix note above).
                jobQ singlejob = new jobQ();
                singlejob.targethost = targethost;
                singlejob.location = location;
                singlejob.isSSL = rectag.isSSL;
                singlejob.targetport = rectag.port;
                singlejob.jobtype = "__file_recon__";
                singlejob.header = cleanheader;
                singlejob.ext = filetype;
                singlejob.filename = filename;
                JOBQ.Add(singlejob);
            }
        }
    }
    else
    {
        //remove it from the Q
        lock (JOBQ)
        {
            // Snapshot first - we cannot remove from JOBQ while enumerating it.
            ArrayList snapshot = new ArrayList();
            snapshot.AddRange(JOBQ.GetRange(0, JOBQ.Count));
            foreach (jobQ item in snapshot)
            {
                if (item.targethost.Equals(targethost) && item.location.Equals(location) && item.targetport.Equals(rectag.port) && item.isSSL == rectag.isSSL)
                {
                    //remove it
                    JOBQ.Remove(item);
                }
            }
        }
    }
}
// Queues smart file-recon jobs for a browsed request: deep mode tests every
// known directory on the host (kn_dirs), shallow mode only the browsed file's
// own directory. For each candidate location, every configured extension
// except the file's own type is queued.
// Fix: fresh jobQ instance per queued entry - the original reused one mutable
// instance across the extension loop, so reference-type jobs would all alias
// the same object with only the last extension.
private void get_Recon_SmartFile(detailedRequest work)
{
    //i hate comboboxes...i hate hate them
    // SelectedItem can be null when nothing is selected; default to "".
    string selectedhost = "";
    try { selectedhost = cmbReconTargetHost.SelectedItem.ToString(); } catch { }
    if (!(work.host.Equals(selectedhost) || chkDoRecon.Checked))
    {
        return;
    }
    string cleanheader = clean_partial_header(work.header);
    if (chkSmartFileDeep.Checked)
    {
        //if its in kn_dirs - then it has been OK-ed before to check here....
        foreach (string dir in kn_dirs)
        {
            string[] parts = dir.Split(':'); // 0-host, 1-dir
            if (parts[0].Equals(work.host))
            {
                foreach (string test_ext in txtWiktoTestTypes.Lines)
                {
                    // Skip the file's own type - else we 'discover' the file
                    // the guy surfed to anyhow...:)
                    if (test_ext.Equals(work.filetype) == false)
                    {
                        jobQ singlejob = new jobQ();
                        singlejob.targethost = work.host;
                        singlejob.targetport = work.port;
                        singlejob.isSSL = work.isSSL;
                        singlejob.location = parts[1];
                        singlejob.jobtype = "__file_recon__";
                        singlejob.header = cleanheader;
                        singlejob.filename = work.filename;
                        singlejob.ext = test_ext;
                        JOBQ.Add(singlejob);
                    }
                }
            }
        }
    }
    if (chkSmartFileShallow.Checked)
    {
        //check if the dir is in the black list
        ArrayList blacklist = new ArrayList();
        blacklist.AddRange(txtWiktoSkipDirs.Lines);
        foreach (string entry in blacklist)
        {
            if (work.URL.IndexOf("/" + entry) >= 0)
            {
                return;
            }
        }
        // Strip the filename segment off the URL to get the directory part.
        string[] dirs = work.URL.Split('/');
        string realloc = string.Empty;
        for (int t = 0; t < dirs.Length - 1; t++)
        {
            realloc += dirs[t] + "/";
        }
        realloc = realloc.TrimEnd('/');
        foreach (string test_ext in txtWiktoTestTypes.Lines)
        {
            if (test_ext.Equals(work.filetype) == false)
            {
                jobQ singlejob = new jobQ();
                singlejob.targethost = work.host;
                singlejob.targetport = work.port;
                singlejob.isSSL = work.isSSL;
                singlejob.location = realloc;
                singlejob.jobtype = "__file_recon__";
                singlejob.header = cleanheader;
                singlejob.filename = work.filename;
                singlejob.ext = test_ext;
                JOBQ.Add(singlejob);
            }
        }
    }
}
// Walks the directory levels of a browsed URL, registering each newly-seen
// "host:/dir/" in kn_dirs and queuing indexability checks and dir-recon jobs
// for new directories. Returns true if at least one new directory was learned.
// Fixes:
//  - cookieline.TrimEnd(';') discarded its result (strings are immutable),
//    and trimmed ';' even though the appended separator is
//    txtCookieVariableSeparator.Text[0]; both corrected.
//  - the original reused ONE jobQ instance: the "_check_index_" job inserted
//    into JOBQ was then mutated to "_dir_recon_" by the following loop (if
//    jobQ is a reference type). Separate instances are used per queue entry.
//  - removed the dead local "proto" (its only consumer was commented out).
private bool get_Recon_Dirs(detailedRequest work)
{
    bool ret = false;
    work.URL = work.URL.Replace("//", "/");
    try
    {
        //check if the dir is in the black list
        ArrayList blacklist = new ArrayList();
        blacklist.AddRange(txtWiktoSkipDirs.Lines);
        foreach (string entry in blacklist)
        {
            if (work.URL.IndexOf("/" + entry) >= 0)
            {
                return false;
            }
        }
        //check if the site is in the black list
        blacklist = new ArrayList();
        blacklist.AddRange(txtReconSkipSites.Lines);
        foreach (string entry in blacklist)
        {
            if (work.host.IndexOf(entry) >= 0)
            {
                return false;
            }
        }
        string[] dirs = work.URL.Split('/');
        string build = "/";
        //i hate comboboxes...i hate hate them
        string selectedhost = "";
        try { selectedhost = cmbReconTargetHost.SelectedItem.ToString(); } catch { }
        if (work.host.Equals(selectedhost) || chkDoRecon.Checked)
        {
            // If the URL contains a '.', the last segment is presumed to be a
            // filename and is dropped; otherwise every segment is a directory.
            int subtract = 1;
            if (work.URL.IndexOf(".") < 0)
            {
                subtract = 0;
            }
            for (int t = 0; t < dirs.Length - subtract; t++)
            {
                build += dirs[t] + "/";
                build = build.Replace("//", "/");
                string hostanddir = (work.host + ":" + build).Replace("//", "/");
                if (kn_dirs.Contains(hostanddir) == false)
                {
                    kn_dirs.Add(hostanddir);
                    ret = true;
                    //we need to here add jobs to do..
                    // Build the directory word list; smart scan also mines the
                    // directory names already known in kn_dirs.
                    ArrayList totestfor = new ArrayList();
                    totestfor.AddRange(txtWiktoTestDirs.Lines);
                    if (chkSmartDirScan.Checked)
                    {
                        foreach (string item in kn_dirs)
                        {
                            string[] partsd = item.Split(':');
                            string[] dirparts = partsd[1].Split('/');
                            foreach (string dirpart in dirparts)
                            {
                                if (totestfor.Contains(dirpart) == false && dirpart.Length > 0)
                                {
                                    totestfor.Add(dirpart);
                                }
                            }
                        }
                    }
                    //we need to restitch the bloody cookies into the header....if they not there already..
                    try
                    {
                        if (work.header.IndexOf("Cookie:") < 0 && work.cookie.Count > 0)
                        {
                            string cookieline = "Cookie: ";
                            foreach (string item in work.cookie)
                            {
                                cookieline += item + txtCookieVariableSeparator.Text[0];
                            }
                            // Fix: keep TrimEnd's result, and trim the same
                            // separator character that was appended above.
                            cookieline = cookieline.TrimEnd(txtCookieVariableSeparator.Text[0]);
                            //lets add it right at the top
                            work.header = cookieline + "\r\n" + work.header;
                        }
                    }
                    catch { }
                    string cleanheader = clean_partial_header(work.header);
                    // add it for indexability check
                    if (ckhReconIndex.Checked)
                    {
                        jobQ indexjob = new jobQ();
                        indexjob.ext = "-NONE-";
                        indexjob.targethost = work.host;
                        indexjob.targetport = work.port;
                        indexjob.isSSL = work.isSSL;
                        indexjob.header = cleanheader;
                        indexjob.jobtype = "_check_index_";
                        indexjob.location = build.Replace("//", "/");
                        //give it priority....:)
                        JOBQ.Insert(0, indexjob);
                    }
                    if (chkReconDirMine.Checked)
                    {
                        foreach (string dir_item in totestfor)
                        {
                            //add job to recon this dir
                            jobQ dirjob = new jobQ();
                            dirjob.ext = "-NONE-";
                            dirjob.targethost = work.host;
                            dirjob.targetport = work.port;
                            dirjob.isSSL = work.isSSL;
                            dirjob.header = cleanheader;
                            dirjob.jobtype = "_dir_recon_";
                            dirjob.location = (build.Replace("//", "/") + dir_item).Replace("//", "/");
                            JOBQ.Add(dirjob);
                        }
                    }
                }
            }
        }
    }
    catch { }
    return ret;
}