// Start is called before the first frame update
void Start()
{
    // Register this instance as the process-wide singleton if none exists yet.
    if (Crunch.Instance == null)
    {
        Crunch.Instance = this;
    }
}
public void GetAllCompaniesCrunchbase()
{
    // Wire the crunch worker to the Crunchbase test data source,
    // then pull the full company list through it.
    Crunch crunch = new Crunch
    {
        DroneDataSource = new CrunchbaseTestDataSource()
    };
    crunch.GetAllCompanies();
}
public void SetNextRunInterval_BaseClass()
{
    // Arrange: crunch configured with the test data source, whose
    // "crunchbase" config node carries the interval (a day name).
    Crunch crunch = new Crunch(new CrunchbaseTestDataSource());

    // Act: derive the next-run timestamp from the "crunchbase" node.
    crunch.SetNextRunIntervalByNode("crunchbase", crunch.Context);

    // Assert: next run is the upcoming Saturday (date only).
    // FIX: MSTest takes (expected, actual) — the original had them swapped,
    // which produces misleading failure messages. DayOfWeek.Saturday also
    // replaces the runtime Enum.Parse of the literal "saturday".
    // NOTE(review): DateTime.Now makes this time-sensitive; an injected
    // clock would make the test deterministic — confirm `Next` semantics.
    DateTime expected = DateTime.Now.Next(DayOfWeek.Saturday).Date;
    Assert.AreEqual(expected, crunch.Context.NextRun);
}
public void Crunchbase_GetNodeText()
{
    Crunch t = new Crunch(new MarketShareDataSource());

    // Read the crunch/interval text node from the account configuration XML.
    // FIX: removed the unused `XmlDocument xmlDoc = t.Xml as XmlDocument;` local.
    string text = XMLUtility.GetTextFromAccountNode(t.Xml, "crunch/interval");

    // FIX: MSTest takes (expected, actual) — the original had them swapped.
    Assert.AreEqual("saturday", text);
}
public void GetFullCompanyByPermalink()
{
    // Fetch the full company record for a known permalink and
    // verify something came back.
    Crunch crunch = new Crunch();
    CrunchbaseDataComponent dataComponent = new CrunchbaseDataComponent
    {
        CompanyLocal = crunch.GetFullCompany("What-Is-Your-Monster", new Dig())
    };

    Assert.IsNotNull(dataComponent.CompanyLocal);
    //Assert.AreEqual("Directi Internet Solutions", cr.records.Registrar);
}
public void GetFullCompanyByPermaLinkAndInsertQviaAPI()
{
    // Fetch a company by permalink, then push it through the live
    // Crunchbase data source attached to the crunch worker.
    Crunch crunch = new Crunch();
    CrunchbaseDataComponent component = new CrunchbaseDataComponent
    {
        CompanyLocal = crunch.GetFullCompany("i-App-Creation", new Dig())
    };

    crunch.DroneDataSource = new CrunchbaseDataSource();
    crunch.DroneDataSource.Process(component);

    Assert.IsNotNull(component.CompanyLocal);
}
public void GetFullCompanyByPermaLinkAndInsertViaDirectCall()
{
    // Fetch a company by permalink.
    Crunch crunch = new Crunch();
    CrunchbaseDataComponent component = new CrunchbaseDataComponent
    {
        CompanyLocal = crunch.GetFullCompany("i-App-Creation", new Dig())
    };

    // Process through the queue-processor data source directly,
    // instead of attaching it to the crunch worker.
    var directSource = new Drone.QueueProcessor.Datasources.CrunchbaseDataSource();
    directSource.Process(component);

    Assert.IsNotNull(component.CompanyLocal);
}
public void GetCompanyByIndexAndInsertQueueViaAPI()
{
    List<Company> list = compManager.GetAllCompanies();

    int index = list.FindIndex(item => item.permalink == "wetpaint");
    // FIX: FindIndex returns -1 when the permalink is missing, which made the
    // original indexing below fail with an opaque ArgumentOutOfRangeException.
    // Fail fast with a clear message instead.
    Assert.IsTrue(index >= 0, "Company 'wetpaint' not found in company list.");

    Crunch crunch = new Crunch(new CrunchbaseDataSource());
    Company co = list[index];
    CompanyRoot cr = crunch.GetFullCompany(co.permalink, new Dig());

    // Queue the fetched record through the crunch worker's data source.
    CrunchbaseDataComponent cdc = new CrunchbaseDataComponent();
    cdc.CompanyLocal = cr;
    crunch.DroneDataSource.Process(cdc);

    Assert.IsNotNull(cr);
}
public void DigWRegistrar_PerformanceTest()
{
    // Resolver will cache these, so discount cache effects when
    // interpreting the timing below.
    Dig dig = new Dig();

    // FIX: collection initializer replaces 21 separate Add calls; the unused
    // `Crunch crunch` and `Random rand` locals from the original are removed.
    List<string> domainList = new List<string>
    {
        "coderow.com", "slashcommunity.com", "vantronix.com", "sociofy.com",
        "netconstructor.com", "dotfox.com", "go.co", "1computer.info",
        "andyet.net", "p1us.me", "10cms.com", "1010data.com",
        "1800vending.com", "easybacklog.com", "abcotechnology.com", "abcsignup.com",
        "airtag.com", "nuospace.com", "brightscope.com", "data180.com",
        "chicagolandlordsattorney.com"
    };
    List<string> webhostList = new List<string>();

    // Time the sequential DNS host-name lookups.
    DateTime startTime = DateTime.Now;
    foreach (var item in domainList)
    {
        webhostList.Add(dig.GetDNSHostName(item));
    }
    DateTime endTime = DateTime.Now;

    // Inspect under a debugger; this method asserts nothing by design.
    TimeSpan elapsedTime1 = endTime.Subtract(startTime);
}
public void GetFoundedDate()
{
    // Get all entries stamped with the 1/1/1900 placeholder founded date;
    // each entry carries [0] = domain name, [1] = company name
    // (presumably — confirm against GetAll1900s).
    List<string[]> entries = GetAll1900s();
    Crunch crunch = new Crunch(new CrunchbaseTestDataSource());
    List<Company> allCompanies = new CompanyManager().GetAllCompanies();

    foreach (string[] item in entries)
    {
        try
        {
            // Match the company by name, case-insensitively.
            Company co = allCompanies
                .Where(com => com.name.Trim().ToLowerInvariant() == item[1].Trim().ToLowerInvariant())
                .FirstOrDefault();
            if (Object.Equals(co, null))
            {
                // Name lookup failed; move on to the next entry.
                continue;
            }

            CompanyRoot cor = crunch.GetFullCompany(co.permalink, new Dig());
            if (Object.Equals(cor, null))
            {
                continue;
            }

            // Normalize the homepage URL before comparing it to the domain.
            cor.homepage_url = cor.homepage_url
                .Replace("http://", "")
                .Replace("https://", "")
                .Replace("www.", "")
                .TrimEnd(new char[] { '/' });

            if (cor.homepage_url == item[0])
            {
                CrunchbaseDataSource ds = new CrunchbaseDataSource();
                CrunchbaseDataComponent dataComponent = crunch.DroneDataComponent as CrunchbaseDataComponent;

                // Only persist when at least one founded field is populated.
                if (cor.founded_year != null || cor.founded_month != null || cor.founded_day != null)
                {
                    dataComponent.CompanyLocal = cor;
                    ds.Process(dataComponent);
                }
            }
        }
        catch (Exception)
        {
            // Best-effort batch: skip entries that fail and continue.
        }
    }
}
public void CleanUrl()
{
    // FIX: removed the unused `Crunch crunch` local, and replaced
    // Assert.IsTrue(a == b) with Assert.AreEqual(expected, actual) so a
    // failure reports the offending value instead of just "true expected".

    // Malformed scheme ("htttp") is stripped.
    Assert.AreEqual("aprendelo.com", Utility.CleanUrl("htttp://aprendelo.com"));
    // Subdomain and trailing path are stripped.
    Assert.AreEqual("syr.edu", Utility.CleanUrl("case.syr.edu/incubators/incubator.php"));
    // "about." subdomain is stripped...
    Assert.AreEqual("picsearch.com", Utility.CleanUrl("about.picsearch.com"));
    // ...but "about.com" itself must survive.
    Assert.AreEqual("about.com", Utility.CleanUrl("about.com"));
    // "beta." as a bare registrable domain must survive...
    Assert.AreEqual("beta.lt", Utility.CleanUrl("beta.lt"));
    // ...while a "beta." subdomain is stripped.
    Assert.AreEqual("booklamp.org", Utility.CleanUrl("beta.booklamp.org"));
    // "global." subdomain and index path are stripped.
    Assert.AreEqual("bose.com", Utility.CleanUrl("global.bose.com/index.html"));
    // "ir." subdomain is stripped.
    Assert.AreEqual("dangdang.com", Utility.CleanUrl("ir.dangdang.com"));
}
public void DigURL_PerformanceTest()
{
    // Resolver will cache these, so discount cache effects when interpreting
    // timings. Dig also does a friendly-name lookup that could be removed for
    // a very slight speed increase over a large run.
    Dig dig = new Dig();

    // FIX: collection initializer replaces repeated Add calls; the unused
    // `crunch`, `errorList`, `header` and `statCode` locals and the large
    // commented-out CheckHead/redirect experiment have been removed
    // (retrieve from version control if needed).
    List<string> domainList = new List<string>
    {
        "mprconsultinghk.com", "tnipresents.com", "kraftymoms.com", "paranique.com",
        "eatads.com", "travellution.com", "bee.com", "yahoo.com",
        "google.com", "microsoft.com"
    };
    Dictionary<string, string> webhostList = new Dictionary<string, string>();

    // FIX: the original wrote to this Dictionary from inside Parallel.For with
    // no synchronization. Dictionary<TKey,TValue> is not thread-safe for
    // concurrent writes — they can corrupt the table or throw. Guard every
    // write with a lock; the slow lookup itself stays outside the lock.
    object gate = new object();

    DateTime startTime = DateTime.Now;
    Parallel.For(0, 100, (i) =>
    {
        foreach (var item in domainList)
        {
            try
            {
                string host = dig.GetWebHostName(item);
                lock (gate)
                {
                    // Key includes the iteration index so each pass records a row.
                    webhostList[item + i.ToString()] = host;
                }
            }
            catch (Exception)
            {
                lock (gate)
                {
                    webhostList[item] = "webhost timed out";
                }
            }
        }
    });
    DateTime endTime = DateTime.Now;

    // Inspect under a debugger; this method asserts nothing by design.
    TimeSpan elapsedTime1 = endTime.Subtract(startTime);
    List<KeyValuePair<string, string>> webtimeoutList =
        webhostList.Where(item => item.Value == "webhost timed out").ToList();
}
static void Main(string[] args)
{
    // Build one face with one mip level of random 512x512 RGBA data.
    var data = new byte[512 * 512 * 4];
    var r = new Random();
    r.NextBytes(data);

    var mips = new crn_mipmap_params();
    var list = new List<List<Memory<byte>>>();
    list.Add(new List<Memory<byte>>());
    list[0].Add(data.AsMemory());

    // Round-trip: compress to DXT1 via crunch, then decompress the result.
    // FIX: removed the unused `memoryData = data.AsMemory()` local.
    var result = Crunch.Compress(512, 512, list, crn_format.DXT1, mips);
    var decompressedData = new List<List<Memory<byte>>>();
    Crunch.Decompress(result, decompressedData);

    // Concatenate the decompressed mip levels of face 0 and dump to disk.
    var memoryStream = new MemoryStream();
    foreach (var d in decompressedData[0])
    {
        var a = d.ToArray();
        memoryStream.Write(a, 0, a.Length);
    }
    File.WriteAllBytes("Result.bin", memoryStream.ToArray());

    // FIX: the original carried large commented-out experiments calling the
    // native crnd_* unpack API and crn_compress via unsafe pointers; that dead
    // code has been removed — retrieve it from version control if needed.
}
public void GetData()
{
    // Drive a single data pull through the live Crunchbase source
    // using the worker's own context.
    var crunch = new Crunch(new CrunchbaseDataSource());
    crunch.GetData(crunch.Context);
}