private void DoOneJob(jobQ thejob){
	// Executes one queued scan job, dispatching on jobtype, and decrements the
	// shared jobstodo counter when the job completes.  Job types:
	//   "_dir_recon_"    - probe whether a directory exists; on a hit, feed the
	//                      result back through do_Recon() for recursive
	//                      discovery and record it in discovered_goods.
	//   "_check_index_"  - test a known directory for indexability.
	//   "__file_recon__" - probe location/filename.ext for an existing file.
	// FIX: the _check_index_ branch previously added to discovered_goods
	// WITHOUT the lock that both other branches take, racing against the other
	// worker threads that share the list.  It now locks consistently.
	// NOTE(review): lock(this) for jobstodo is an anti-pattern (outside code
	// can lock the same object); a private gate field would be better but
	// requires a change outside this method.
	try{
		#region JOBRECON_DIR
		if (thejob.jobtype.Equals("_dir_recon_")){
			statusBar1.Text="Testing directory "+thejob.location+" on host "+thejob.targethost+" SSL is "+thejob.isSSL.ToString();
			bool istrue=testRequest(thejob.targethost, thejob.targetport,
				buildRequest(thejob.location,"","",thejob.header),
				(int)updownTimeOut.Value, false, thejob.isSSL, thejob.header,
				Convert.ToDouble(NUPDOWNBackEnd.Value));
			if (istrue == true) {
				//we also need to feed this back for recursive testing!
				detailedRequest feedback = new detailedRequest();
				feedback.port=thejob.targetport;
				feedback.host=thejob.targethost;
				feedback.isSSL=thejob.isSSL;
				feedback.header=thejob.header;
				feedback.URL=thejob.location+"/";
				feedback.filename="";
				feedback.filetype="";
				do_Recon(feedback);
				//ok add the treenode
				string proto = thejob.isSSL ? "https://" : "http://";
				discovered newdis = new discovered();
				newdis.host=thejob.targethost;
				newdis.isSSL=thejob.isSSL;
				newdis.port=thejob.targetport;
				newdis.URL=thejob.location;
				newdis.protocol=proto;
				newdis.mode=2;
				newdis.header=thejob.header;
				lock (discovered_goods){
					discovered_goods.Add(newdis);
				}
			}
			lock (this){
				jobstodo--;
			}
		}
		#endregion
		#region JOB_CHECK_INDEX
		//do reconjob
		if (thejob.jobtype.Equals("_check_index_")){
			statusBar1.Text="Checking indexability for "+thejob.location+" on "+thejob.targethost+" SSL is "+thejob.isSSL.ToString();
			bool isindex=checkIndexability(thejob);
			if (isindex){
				string proto = thejob.isSSL ? "https://" : "http://";
				discovered newdis = new discovered();
				newdis.host=thejob.targethost;
				newdis.isSSL=thejob.isSSL;
				newdis.port=thejob.targetport;
				newdis.URL=thejob.location+"[indexable]";
				newdis.protocol=proto;
				newdis.mode=3;
				newdis.header=thejob.header;
				//FIX: this Add was previously unlocked while other worker
				//threads lock the same list - race condition.
				lock (discovered_goods){
					discovered_goods.Add(newdis);
				}
			}
			lock (this){
				jobstodo--;
			}
		}
		#endregion
		#region JOBRECON_FILE
		if (thejob.jobtype.Equals("__file_recon__")){
			string displaytext="Testing file /"+thejob.location+"/"+thejob.filename+"."+thejob.ext+" on host "+thejob.targethost+" SSL is "+thejob.isSSL.ToString();
			// collapse accidental double slashes for display only
			statusBar1.Text=displaytext.Replace("//","/").Replace("///","/");
			bool istrue=testRequest(thejob.targethost, thejob.targetport,
				buildRequest(thejob.location,thejob.filename,thejob.ext,thejob.header),
				(int)updownTimeOut.Value, true, thejob.isSSL, thejob.header,
				Convert.ToDouble(NUPDOWNBackEnd.Value));
			if (istrue == true) {
				//ok add the treenode
				string proto = thejob.isSSL ? "https://" : "http://";
				discovered newdis = new discovered();
				newdis.host=thejob.targethost;
				newdis.isSSL=thejob.isSSL;
				newdis.port=thejob.targetport;
				newdis.URL=thejob.location+"/"+thejob.filename+"."+thejob.ext;
				newdis.protocol=proto;
				newdis.mode=4;
				newdis.header=thejob.header;
				lock (discovered_goods){
					discovered_goods.Add(newdis);
				}
			}
			lock (this){
				jobstodo--;
			}
		}
		#endregion
	}
	catch(Exception ex) {
		// surface unexpected worker errors to the user rather than dying silently
		MessageBox.Show(ex.ToString());
	}
}
private void btnMangleAddUserInput_Click(object sender, System.EventArgs e) {
	// Adds the text from txtMangleUserInput as a user-defined mangle entry:
	// builds a placeholder request carrying the four mangled forms of the
	// input (md5 / sha1 / base64-decode / base64-encode), stores it in
	// userMange_detailed_Requests and lists it in the UI.
	string input = txtMangleUserInput.Text;

	detailedRequest req = new detailedRequest();
	req.header = "";
	req.action = "UsrDef";
	req.cookie = null; //this here is a 'hack'..hehehehe - see the delete code
	req.filename = input;
	req.filetype = "";
	req.GETparameters = null;
	req.POSTparameters = null;
	req.isSSL = false;
	req.port = "0";
	req.URL = "UsrDef";
	req.host = "UsrDef";
	req.Processed = new ArrayList();

	Mangled entry = new Mangled();
	entry.type = "UsrDef";
	entry.varname = "UsrDef";
	entry.varval = input;
	entry.varvalmd5 = GetMangle(entry.varval, 0);
	entry.varvalsha1 = GetMangle(entry.varval, 1);
	entry.varvalbase64dec = GetMangle(entry.varval, 2);
	entry.varvalbase64enc = GetMangle(entry.varval, 3);

	req.Processed.Add(entry);
	userMange_detailed_Requests.Add(req);
	lstMangleUserInput.Items.Add(input);
}
private void get_Recon_SmartFile(detailedRequest work){
	// Queues __file_recon__ jobs that look for work.filename with every
	// extension listed in txtWiktoTestTypes (except the extension actually
	// requested, which we have already 'discovered'):
	//   deep    - in every directory already known for this host (kn_dirs)
	//   shallow - only in the directory of the request itself
	// FIX: previously a SINGLE jobQ instance was mutated and re-Added inside
	// the extension loops, so (jobQ being a reference type) every queued
	// entry aliased one object and all jobs ran with only the LAST extension.
	// Each queued job is now its own instance.  clean_partial_header() is
	// also hoisted out of the loops since its input never changes here.
	//i hate comboboxes...i hate hate them
	string selectedhost="";
	try{
		selectedhost=cmbReconTargetHost.SelectedItem.ToString();
	} catch {}
	if (work.host.Equals(selectedhost) || chkDoRecon.Checked){
		if (chkSmartFileDeep.Checked){
			//if its in kn_dirs - then it has been OK-ed before to check here....
			string cleanheader=clean_partial_header(work.header);
			foreach (string dir in kn_dirs){
				string[] parts = dir.Split(':'); //0-host 1-dir
				if (parts[0].Equals(work.host)){
					ArrayList totestfor=new ArrayList();
					totestfor.AddRange(txtWiktoTestTypes.Lines);
					foreach (string test_ext in totestfor){
						if (test_ext.Equals(work.filetype)==false){ //else we 'discover' the file the guy surfed to anyhow...:)
							jobQ singlejob = new jobQ();
							singlejob.targethost=work.host;
							singlejob.targetport=work.port;
							singlejob.isSSL=work.isSSL;
							singlejob.location=parts[1];
							singlejob.jobtype="__file_recon__";
							singlejob.header=cleanheader;
							singlejob.filename=work.filename;
							singlejob.ext=test_ext;
							JOBQ.Add(singlejob);
						}
					}
				}
			}
		}
	}
	//NOTE(review): the shallow scan runs regardless of the host-match /
	//chkDoRecon condition above - this mirrors the original bracing; confirm
	//whether it was meant to sit inside that check.
	if (chkSmartFileShallow.Checked){
		//check if the dir is in the black list
		ArrayList blacklist=new ArrayList();
		blacklist.AddRange(txtWiktoSkipDirs.Lines);
		foreach (string entry in blacklist){
			if (work.URL.IndexOf("/"+entry)>=0){
				return;
			}
		}
		//process location: the request's own directory (URL minus last segment)
		string[] dirs=work.URL.Split('/');
		string realloc=string.Empty;
		for (int t=0; t<dirs.Length-1; t++){
			realloc+=dirs[t]+"/";
		}
		realloc=realloc.TrimEnd('/');
		string cleanheader=clean_partial_header(work.header);
		foreach (string test_ext in txtWiktoTestTypes.Lines){
			if (test_ext.Equals(work.filetype)==false){ //else we 'discover' the file the guy surfed to anyhow...:)
				jobQ singlejob = new jobQ();
				singlejob.targethost=work.host;
				singlejob.targetport=work.port;
				singlejob.isSSL=work.isSSL;
				singlejob.location=realloc;
				singlejob.jobtype="__file_recon__";
				singlejob.header=cleanheader;
				singlejob.filename=work.filename;
				singlejob.ext=test_ext;
				JOBQ.Add(singlejob);
			}
		}
	}
}
private bool get_Recon_Dirs(detailedRequest work){
	// Walks every directory level of work.URL and, for each host:dir pair not
	// seen before, records it in kn_dirs and queues recon jobs: an optional
	// high-priority indexability check plus one _dir_recon_ probe per
	// candidate directory name.  Honors the dir and site blacklists.
	// Returns true when at least one new directory was learned.
	// FIXES:
	//  * cookieline.TrimEnd(...) result was discarded (strings are immutable,
	//    so the statement was a no-op) - the result is now assigned, trimming
	//    the separator char that was actually appended;
	//  * one jobQ instance was Inserted for the index check and then mutated
	//    and re-Added for every candidate dir, so all queued entries aliased
	//    the same object - each job is now a fresh instance;
	//  * removed the dead 'proto' local (only referenced by commented-out code).
	bool ret=false;
	work.URL=work.URL.Replace("//","/");
	try{
		//check if the dir is in the black list
		ArrayList blacklist=new ArrayList();
		blacklist.AddRange(txtWiktoSkipDirs.Lines);
		foreach (string entry in blacklist){
			if (work.URL.IndexOf("/"+entry)>=0){
				return false;
			}
		}
		//check if the site is in the black list
		blacklist=new ArrayList();
		blacklist.AddRange(txtReconSkipSites.Lines);
		foreach (string entry in blacklist){
			if (work.host.IndexOf(entry)>=0){
				return false;
			}
		}
		string[] dirs=work.URL.Split('/');
		string build="/";
		//i hate comboboxes...i hate hate them
		string selectedhost="";
		try{
			selectedhost=cmbReconTargetHost.SelectedItem.ToString();
		} catch {}
		if (work.host.Equals(selectedhost) || chkDoRecon.Checked){
			// a URL with no '.' ends in a directory, so scan every segment;
			// otherwise the last segment is a file and is skipped
			int subtract = (work.URL.IndexOf(".")<0) ? 0 : 1;
			for (int t=0; t<dirs.Length-subtract; t++){
				build+=dirs[t]+"/";
				build=build.Replace("//","/");
				string hostanddir=(work.host+":"+build).Replace("//","/");
				if (kn_dirs.Contains(hostanddir)==false){
					kn_dirs.Add(hostanddir);
					ret=true;
					//directory names to probe under this newly learned dir
					ArrayList totestfor=new ArrayList();
					totestfor.AddRange(txtWiktoTestDirs.Lines);
					if (chkSmartDirScan.Checked){
						//also reuse every dir name already seen anywhere
						foreach (string item in kn_dirs){
							string[] partsd = item.Split(':');
							string[] dirparts = partsd[1].Split('/');
							foreach (string dirpart in dirparts){
								if (totestfor.Contains(dirpart)==false && dirpart.Length>0){
									totestfor.Add(dirpart);
								}
							}
						}
					}
					try{
						if (work.header.IndexOf("Cookie:")<0 && work.cookie.Count>0){
							//we need to restitch the bloody cookies into the header....if they not there already..
							string cookieline="Cookie: ";
							foreach (string item in work.cookie){
								cookieline+=item+txtCookieVariableSeparator.Text[0];
							}
							//FIX: assign the trimmed result (was silently discarded)
							cookieline=cookieline.TrimEnd(txtCookieVariableSeparator.Text[0]);
							//lets add it right at the top
							work.header=cookieline+"\r\n"+work.header;
						}
					}catch{}
					string cleanheader=clean_partial_header(work.header);
					// add it for indexability check - gets queue priority....:)
					if (ckhReconIndex.Checked){
						jobQ indexjob = new jobQ();
						indexjob.ext="-NONE-";
						indexjob.targethost=work.host;
						indexjob.targetport=work.port;
						indexjob.isSSL=work.isSSL;
						indexjob.header=cleanheader;
						indexjob.jobtype="_check_index_";
						indexjob.location=build.Replace("//","/");
						JOBQ.Insert(0,indexjob);
					}
					if (chkReconDirMine.Checked){
						//add one recon job per candidate directory name
						foreach (string dir_item in totestfor){
							jobQ dirjob = new jobQ();
							dirjob.ext="-NONE-";
							dirjob.targethost=work.host;
							dirjob.targetport=work.port;
							dirjob.isSSL=work.isSSL;
							dirjob.header=cleanheader;
							dirjob.jobtype="_dir_recon_";
							dirjob.location=(build.Replace("//","/")+dir_item).Replace("//","/");
							JOBQ.Add(dirjob);
						}
					}
				}
			}
		}
	}catch{}
	return ret;
}
private void do_Recon(detailedRequest work){
	// Feeds one parsed request into the recon knowledge base: learns new
	// directories (get_Recon_Dirs), file extensions, filenames (triggering a
	// smart-file scan the first time a filename is seen) and hosts.
	// Cleanup: removed the dead 'is_new' store chain feeding an empty if
	// block, and the unused exception variable (compiler warning CS0168).
	try{
		get_Recon_Dirs(work);
		if (kn_exts.Contains(work.filetype)==false && work.filetype.Length>0){
			kn_exts.Add(work.filetype);
		}
		if (kn_filenames.Contains(work.filename)==false && work.filename.Length>0){
			//smart-file scan only the first time we see this filename
			get_Recon_SmartFile(work);
			kn_filenames.Add(work.filename);
		}
		if (kn_hosts.Contains(work.host)==false && work.host.Length>0){
			kn_hosts.Add(work.host);
			cmbReconTargetHost.Items.Add(work.host);
		}
	}
	catch {
		//best-effort: recon bookkeeping must never take down a worker thread
	}
}
private detailedRequest getHTTPdetails(string request,string host, bool isSSL){
	// Parses a raw HTTP request string into a detailedRequest: action (method),
	// URL, host/port, filename/filetype, GET/POST/cookie parameters and the
	// remaining header lines.  Separator characters are user-configurable via
	// the txt*Separator text boxes.  On any parse error the partially filled
	// result is returned (outer catch at the bottom).
	detailedRequest result = new detailedRequest();
	result.POSTparameters=new System.Collections.ArrayList();
	result.GETparameters=new System.Collections.ArrayList();
	result.Processed=new ArrayList();
	result.cookie=new System.Collections.ArrayList();
	//assume no XML
	result.isXML=false;
	result.isMultiPart=false;
	try{
		//too short to even contain a request line
		if (request.Length<3){
			return result;
		}
		string[] lines = request.Replace("\r\n","\n").Split('\n');
		//request line: "<METHOD> <URL>[<sep>query] <HTTP/x.x>"
		string[] parts = lines[0].Split(' ');
		result.isSSL=isSSL;
		//action
		result.action=parts[0];
		//URL: split off the query part, strip the scheme, rebuild the path
		string[] actionpar=parts[1].Split(txtBaseURLSeparator.Text[0]);
		string[] URLparts = actionpar[0].Replace("http://","").Split('/');
		result.URL="/";
		for (int y=1; y<URLparts.Length; y++){
			//NOTE(review): Length>=0 is always true - '>0' was probably
			//intended; empty segments are appended here and the "//" collapse
			//below papers over it.
			if (URLparts[y].Length>=0){
				result.URL+=URLparts[y]+"/";
			}
		}
		//a URL containing a '.' (a file) or not ending in '/' loses the
		//trailing slash that the loop above always appends
		if (actionpar[0].IndexOf(".")>=0 || actionpar[0].EndsWith("/")==false){
			result.URL=result.URL.TrimEnd('/');
		}
		result.URL=result.URL.Replace("//","/");
		if (result.URL.Equals("")){
			result.URL="/";
		}
		//Filetype: extension of the last path segment, or "none"
		string[] types=result.URL.Split('/');
		string[] ftypes=types[types.Length-1].Split('.');
		if (ftypes.Length>=2){
			result.filetype=ftypes[ftypes.Length-1];
		} else {
			result.filetype="none";
		}
		//ok here we split the host and the port...
		if (host.IndexOf(":")>=0){
			string[] portparts=host.Split(':');
			result.host=portparts[0];
			result.port=portparts[1];
		} else {
			result.host=host;
			//default port from the scheme
			if (isSSL){
				result.port="443";
			} else {
				result.port="80";
			}
		}
		//filename: last path segment without its extension
		string temp=ftypes[0];
		string[] dirs=temp.Split('/');
		result.filename=dirs[dirs.Length-1];
		//GET parameters: split the query on the variable separator
		if (actionpar.Length>=2){
			string[] parparts=actionpar[1].Split(txtVariableSeparator.Text[0]);
			foreach (string item in parparts){
				if (item.Length>1){
					result.GETparameters.Add(item);
					result.Processed.Add(getMangleSet(item,"GET",txtKeyValueSeparator.Text[0]));
				}
			}
		}
		//header: copy lines until the (near-)blank separator line; cookie
		//lines are pulled out into result.cookie instead of the header blob
		int u=1;
		for (u=1; u<lines.Length; u++){
			if (lines[u].Length<3){
				break;
			}
			//cookie
			if (lines[u].StartsWith("Cookie: ")){
				//normalise "<kv-sep> " to "<kv-sep>" then split on the cookie
				//variable separator
				string moo=lines[u].Replace("Cookie: ","").Replace(txtCookieKeyValueSeparator.Text[0]+" ",txtCookieKeyValueSeparator.Text);
				string[] work=moo.Split(txtCookieVariableSeparator.Text[0]);
				foreach (string item in work){
					if (item.Length>1){
						result.cookie.Add(item.TrimStart(' '));
						result.Processed.Add(getMangleSet(item.TrimStart(' '),"Cookie",txtCookieKeyValueSeparator.Text[0]));
					}
				}
			} else {
				result.header+=lines[u]+"\r\n";
			}
		}
		//POST parameters
		//check if its multi part form...:|
		if (result.header.IndexOf("Content-Type: multipart/form-data")>=0){
			//its multi part - welcome to hell - for now, we'll handle it as a blob
			result.isMultiPart=true;
			string allMulti="";
			for (int y=(u+1); y<lines.Length; y++){
				allMulti+=lines[y]+"\r\n";
			}
			result.POSTparameters.Add(allMulti);
			result.Processed.Add(getMangleSet(allMulti,"POST",txtKeyValueSeparator.Text[0]));
		} else {
			//first check if its XML
			//NOTE(review): lines[u+1] throws IndexOutOfRange when the request
			//has no body after the blank line; the outer catch then returns
			//the partial result, so bodyless requests lose only POST data.
			if (lines[u+1].StartsWith("<?xml")){
				//find the end of the XML
				result.isXML=true;
				string allXML="";
				for (int y=(u+1); y<lines.Length; y++){
					//if (lines[y].Length<=0){
					//	break;
					//} else {
					allXML+=lines[y];
					//}
				}
				result.POSTparameters.Add(allXML);
				result.Processed.Add(getMangleSet(allXML,"POST",txtKeyValueSeparator.Text[0]));
			} else {
				//normal POST
				//get the rest of the post..if there is more!
				string restofpost=string.Empty;
				if (u+1<=lines.Length-3){
					for (int o=u+1; o<lines.Length; o++){
						restofpost+=lines[o]+"\r\n";
					}
				} else {
					restofpost=lines[u+1];
				}
				string[] postPar = restofpost.Split(txtVariableSeparator.Text[0]);
				foreach (string oitem in postPar){
					//strip embedded NUL characters before length-testing
					string item=oitem.Replace("\0","");
					if (item.Length > 1){
						result.POSTparameters.Add(item);
						result.Processed.Add(getMangleSet(item,"POST",txtKeyValueSeparator.Text[0]));
					}
				}
			}
		}
		return result;
	}
	catch {return result;}
}
private bool ApplyBigFilter(detailedRequest passed, HTTPRequest Hpassed){
	// Decides whether a captured request is filtered OUT by currentFilter.
	// Returns true = drop the request, false = keep it.
	// Per category (host, location, action, ext, cookie, parameters, request
	// header, response header): the request matches when ANY filter term is a
	// substring of the corresponding field; an EMPTY term list always matches
	// (that category does not constrain).  The currentFilter.in* flags
	// XOR-invert a category's result, flipping it from include to exclude
	// semantics for that category.
	if (bypassfiltercompletely){
		return false;
	}
	//assume its good
	bool matchfilterh = false;
	//compare hosts
	if (currentFilter.Hosts.Count>0){
		foreach (string item in currentFilter.Hosts){
			if (passed.host.IndexOf(item)>=0){
				matchfilterh=true;
				break;
			}
		}
	} else {matchfilterh=true;}
	//compare locations
	bool matchfilterl = false;
	if (currentFilter.Locations.Count>0){
		foreach (string item in currentFilter.Locations){
			if (passed.URL.IndexOf(item)>=0){
				matchfilterl=true;
				break;
			}
		}
	}else { matchfilterl=true; }
	//compare actions
	bool matchfiltera = false;
	if (currentFilter.Actions.Count>0){
		foreach (string item in currentFilter.Actions){
			if (passed.action.IndexOf(item)>=0){
				matchfiltera=true;
				break;
			}
		}
	} else { matchfiltera=true; }
	//compare ext
	bool matchfiltere=false;
	if (currentFilter.Ext.Count>0){
		foreach (string item in currentFilter.Ext){
			if (passed.filetype.IndexOf(item)>=0){
				matchfiltere=true;
				//break;
			}
		}
	} else { matchfiltere=true; }
	//compare cookies
	bool matchfilterc=false;
	if (currentFilter.Cookies.Count>0){
		foreach (string item in currentFilter.Cookies){
			foreach (string Citem in passed.cookie){
				if (Citem.IndexOf(item)>=0){
					matchfilterc=true;
					//break;
				}
			}
		}
	} else { matchfilterc=true; }
	//compare parameters - GET and POST are tracked separately
	bool matchfilterpg = false;
	bool matchfilterpp = false;
	if (anyPostorGet==false){
		foreach (string item in currentFilter.Parameters){
			foreach (string Gitem in passed.GETparameters){
				if (Gitem.IndexOf(item)>=0){
					matchfilterpg=true;
					//break;
				}
			}
			if (matchfilterpg){
				break;
			}
		}
		if (currentFilter.Parameters.Count<=0){
			matchfilterpg=true;
		}
		// POST
		foreach (string item in currentFilter.Parameters){
			foreach (string Pitem in passed.POSTparameters){
				if (Pitem.IndexOf(item)>=0){
					matchfilterpp=true;
					//break;
				}
			}
		}
		if (currentFilter.Parameters.Count<=0){
			matchfilterpp=true;
		}
	} else {
		//"any parameter" mode: match as soon as the request carries any GET
		//or POST parameter at all
		if (passed.GETparameters.Count>0 || passed.POSTparameters.Count>0){
			matchfilterpg=true;
			matchfilterpp=true;
		}
	}
	//request header terms
	bool matchfilterReq=false;
	if (currentFilter.RequestHeader.Count>0){
		foreach (string item in currentFilter.RequestHeader){
			if (Hpassed.header.IndexOf(item)>=0){
				matchfilterReq=true;
			}
		}
	} else { matchfilterReq=true; }
	//response header terms
	bool matchfilterRes=false;
	if (currentFilter.ResponseHeader.Count>0){
		foreach (string item in currentFilter.ResponseHeader){
			if (Hpassed.response.IndexOf(item)>=0){
				matchfilterRes=true;
			}
		}
	} else { matchfilterRes=true; }
	//isSSL
	bool mfSSL=false;
	if (currentFilter.IsHTTPS && Hpassed.isSSL){
		mfSSL=true;
	}
	//is HTTP
	bool mfHTTP=false;
	if (currentFilter.IsHTTP && !Hpassed.isSSL){
		mfHTTP=true;
	}
	//invert any category whose in* (exclude) flag is set
	matchfiltere=matchfiltere^currentFilter.inExt;
	matchfiltera=matchfiltera^currentFilter.inActions;
	matchfilterl=matchfilterl^currentFilter.inLocations;
	matchfilterpg=matchfilterpg^currentFilter.inParameters;
	matchfilterpp=matchfilterpp^currentFilter.inParameters;
	matchfilterh=matchfilterh^currentFilter.inHost;
	matchfilterc=matchfilterc^currentFilter.inCookies;
	matchfilterReq=matchfilterReq^currentFilter.inRequests;
	matchfilterRes=matchfilterRes^currentFilter.inResponse;
	//keep only requests that satisfy every category; in exclude-parameter
	//mode BOTH GET and POST must pass, otherwise either one suffices
	if (currentFilter.inParameters){
		return !((mfSSL || mfHTTP) && matchfilterReq && matchfilterRes && matchfilterc && matchfiltere && matchfiltera && matchfilterh && matchfilterl && (matchfilterpg && matchfilterpp));
	} else {
		return !((mfSSL || mfHTTP) && matchfilterReq && matchfilterRes && matchfilterc && matchfiltere && matchfiltera && matchfilterh && matchfilterl && (matchfilterpg || matchfilterpp));
	}
}