/// <summary>
/// Uploads an Oozie workflow definition to blob-backed HDFS, submits and starts
/// the Oozie job, then fetches the job's status.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    // SECURITY(review): the storage account key is hard-coded in source.
    // Move it to configuration / a secret store and rotate the exposed key.
    var blobAdapter = new BlobStorageAdapter(
        "joydivision",
        "RMfUv/odmgsSdOcLqYfCLUKDIk9QI556Ly+3kxH8M6YgKcXwKMaoHLd6di201F/v1HVxHOjSMBJKPj1DXNSBQw==",
        "joydivision",
        true);
    var webHdfsClient = new WebHDFSClient("hadoop", blobAdapter);

    // Upload the Oozie config to appPath in order to submit the job.
    string source = AppDomain.CurrentDomain.BaseDirectory + @"\Workflow.xml";
    // BUG FIX: the original applied Replace('\\', '/') to a literal that
    // contains no backslashes; the call was a no-op and has been removed.
    string target = "/handsonlabs/app/Workflow.xml";
    Console.WriteLine("Uploading to " + target);
    var createdFile = webHdfsClient.CreateFile(source, target);
    createdFile.Wait();

    var oozieClient = new OozieHttpClient(AzureCluster, AzureUserName, AzurePassword);
    string outputPath = AzureStorageVaultRoot + "/handsonlabs/output/oozieJob/" + Guid.NewGuid().ToString("N");
    var oozieJobProperties = new OozieJobProperties(
        AzureUserName,
        AzureStorageVaultRoot,
        "headnodehost:9010",
        AzureStorageVaultRoot + "/handsonlabs/app/Workflow.xml",
        "",
        outputPath);

    // GetTaskResults blocks until submission completes and yields the JSON
    // response, so the original trailing submitJob.Wait() was redundant and
    // has been removed.
    var submitJob = oozieClient.SubmitJob(oozieJobProperties.ToDictionary());
    string id = HttpClientTools.GetTaskResults(submitJob).Value<string>("id");

    // BUG FIX: StartJob's task was previously fire-and-forget; wait on it so
    // start failures surface before we poll for status.
    oozieClient.StartJob(id).Wait();

    // Check job status.
    Task<HttpResponseMessage> t1 = oozieClient.GetJobInfo(id);
    var res = HttpClientTools.GetTaskResults(t1);
}
/// <summary>
/// Verifies that BinaryObjectManager.Download returns the stored file for a
/// known id and an empty result for an unknown id, using an in-memory EF Core
/// context seeded with one BinaryObject.
/// </summary>
public async Task Download()
{
    // arrange
    var options = new DbContextOptionsBuilder<StorageContext>()
        .UseInMemoryDatabase(databaseName: "SavesFile")
        .Options;
    var context = new StorageContext(options);

    var dummyBinaryObject = new BinaryObject
    {
        Id = new Guid("e7d99328-b5f0-4356-80df-2dd61a82505c"),
        Name = "Test Binary Object",
        OrganizationId = new Guid("04a348ec-2968-4406-bf4a-4a5fda73df00"),
        ContentType = "Test Content Type",
        CorrelationEntityId = new Guid("04a348ec-2968-4406-bf4a-4a5fda73df00"),
        CorrelationEntity = "Test Correlation Entity",
        StoragePath = @"BinaryObjects\04a348ec-2968-4406-bf4a-4a5fda73df00\TestAPI\e7d99328-b5f0-4356-80df-2dd61a82505c",
        StorageProvider = "Test Provider",
        SizeInBytes = 28,
        HashCode = 55555.ToString()
    };
    Seed(context, dummyBinaryObject);

    var logger = Mock.Of<ILogger<BinaryObject>>();
    var httpContextAccessor = new Mock<IHttpContextAccessor>();
    // BUG FIX: the original passed It.IsAny<string>() to Returns(); outside a
    // Setup expression that matcher just evaluates to null. State the intent
    // (an anonymous caller) explicitly.
    httpContextAccessor.Setup(req => req.HttpContext.User.Identity.Name).Returns((string)null);
    var organizationMemberRepository = Mock.Of<IOrganizationMemberRepository>();
    var usersRepository = Mock.Of<IAspNetUsersRepository>();
    var organizationManager = Mock.Of<IOrganizationManager>();

    var repo = new BinaryObjectRepository(context, logger, httpContextAccessor.Object);
    var blobStorageAdapter = new BlobStorageAdapter(repo);
    var directoryManager = new DirectoryManager();
    var fileSystemAdapter = new FileSystemAdapter(directoryManager);
    var configuration = new Mock<IConfiguration>();
    // Same matcher misuse as above: the manager receives a null section, which
    // is what the original effectively produced.
    configuration.Setup(req => req.GetSection(ConfigurationValue.Values)).Returns((IConfigurationSection)null);

    var manager = new BinaryObjectManager(
        blobStorageAdapter,
        fileSystemAdapter,
        httpContextAccessor.Object,
        organizationMemberRepository,
        organizationManager,
        configuration.Object,
        usersRepository);

    string validBinaryObjectId = dummyBinaryObject.Id.ToString();
    string invalidBinaryObjectId = "9999bbf9-9327-48f7-a5e3-36cdfe4eb6a6";

    // act
    // BUG FIX: the method is async but the original never awaited anything
    // (CS1998) and blocked on .Result, which risks deadlocks and wraps
    // failures in AggregateException. Await the calls instead.
    var validFile = await manager.Download(validBinaryObjectId);
    var invalidFile = await manager.Download(invalidBinaryObjectId);

    // assert
    // Assert.Equal over Assert.True(x.Equals(y)) gives a useful diff on failure.
    Assert.Equal(
        @"BinaryObjects\04a348ec-2968-4406-bf4a-4a5fda73df00\TestAPI\e7d99328-b5f0-4356-80df-2dd61a82505c",
        validFile.StoragePath);
    Assert.Null(invalidFile.StoragePath);
}
/// <summary>
/// Uploads an Oozie workflow definition to blob-backed HDFS, submits and starts
/// the Oozie job, then retrieves the job's current status.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    // SECURITY(review): storage account key embedded in source; relocate to a
    // secret store and rotate the exposed credential.
    var blobAdapter = new BlobStorageAdapter(
        "joydivision",
        "RMfUv/odmgsSdOcLqYfCLUKDIk9QI556Ly+3kxH8M6YgKcXwKMaoHLd6di201F/v1HVxHOjSMBJKPj1DXNSBQw==",
        "joydivision",
        true);
    var webHdfsClient = new WebHDFSClient("hadoop", blobAdapter);

    // Upload the Oozie config to appPath so the job can be submitted.
    string source = AppDomain.CurrentDomain.BaseDirectory + @"\Workflow.xml";
    // BUG FIX: Replace('\\', '/') on this literal was a no-op (it holds only
    // forward slashes) and has been dropped.
    string target = "/handsonlabs/app/Workflow.xml";
    Console.WriteLine("Uploading to " + target);
    var createdFile = webHdfsClient.CreateFile(source, target);
    createdFile.Wait();

    var oozieClient = new OozieHttpClient(AzureCluster, AzureUserName, AzurePassword);
    string outputPath = AzureStorageVaultRoot + "/handsonlabs/output/oozieJob/" + Guid.NewGuid().ToString("N");
    var oozieJobProperties = new OozieJobProperties(
        AzureUserName,
        AzureStorageVaultRoot,
        "headnodehost:9010",
        AzureStorageVaultRoot + "/handsonlabs/app/Workflow.xml",
        "",
        outputPath);

    // GetTaskResults already blocks on the submission task to extract the job
    // id, so the original submitJob.Wait() afterwards was redundant.
    var submitJob = oozieClient.SubmitJob(oozieJobProperties.ToDictionary());
    string id = HttpClientTools.GetTaskResults(submitJob).Value<string>("id");

    // BUG FIX: the StartJob task was fire-and-forget; wait so a failed start
    // is observed instead of silently ignored.
    oozieClient.StartJob(id).Wait();

    // Check job status.
    Task<HttpResponseMessage> t1 = oozieClient.GetJobInfo(id);
    var res = HttpClientTools.GetTaskResults(t1);
}
/// <summary>
/// End-to-end HDInsight sample: copies a crime-data CSV into Azure blob storage
/// via WebHDFS, creates a Hive table over that location, loads the file into the
/// table, and prints the result of a SELECT over it.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    // You must substitute the following values to match your environment
    // (storage account name/key, cluster name, Hadoop credentials).
    // SECURITY(review): the storage key is hard-coded in source; move
    // credentials to configuration and rotate the exposed key.
    var asvstorageaccountname = "shavas300";
    var asvkey = "W00+SmVFMQffNUikJpfAa8WOGdhka27fP42dnRG2FhuCxH4Mk7TaN2qGknqMwNjCXvbKCMwHeLJZWByhWiO4sQ==";
    var clustername = "shavas100";
    var hadoopusername = "******";
    var hadoopuserpassword = "******";

    // Per-student paths and table names inside the container.
    var studentid = "03";
    var asvstudentdirectory = "/" + studentid + "_webhdfs/crimeresult";
    var hivetablename = studentid + "_crimeinfo_net";
    var hivetablepathincontainer = "/" + studentid + "_crimeinfo";
    var crimedatafile = "/CityCrimeResultData.csv";
    var localfile = "c:/data" + crimedatafile;

    // Blob storage account details and full cluster address.
    var asvaccount = asvstorageaccountname + ".blob.core.windows.net";
    var asvcontainer = "working";
    var clusteruri = "https://" + clustername + ".azurehdinsight.net";

    // Set up Azure storage access for the program.
    var storageadapter = new BlobStorageAdapter(asvaccount, asvkey, asvcontainer, true);
    var HDFSClient = new WebHDFSClient(hadoopusername, storageadapter);

    // BUG FIX: the original message concatenated the account name directly
    // into "container:" ("...shavas300container: ..."); a space was added.
    Console.WriteLine("Creating directory: " + asvstudentdirectory + " in account: " + asvstorageaccountname + " container: " + asvcontainer);
    // Create the directory and wait for the task to complete.
    HDFSClient.CreateDirectory(asvstudentdirectory).Wait();

    Console.WriteLine("Copying");
    // Copy the file from the local directory to Azure storage.
    HDFSClient.CreateFile(localfile, asvstudentdirectory + crimedatafile).Wait();

    // Create the Hive connection.
    var hiveconnection = new MyHiveDatabase(new System.Uri(clusteruri), hadoopusername, hadoopuserpassword, asvaccount, asvkey);

    Console.WriteLine("creating hive table " + hivetablename);
    // Create the table over the wasb:// location (verbatim literal preserved
    // byte-for-byte, including its embedded line break, so the emitted HiveQL
    // is unchanged).
    string command = "CREATE TABLE " + hivetablename;
    command += @"( state string, city string, population string, total_criminal_activities string, 
crime_percentage string ) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE ";
    command += "LOCATION 'wasb://" + asvcontainer + "@" + asvaccount + hivetablepathincontainer + "';";
    Console.WriteLine(command);
    hiveconnection.ExecuteHiveQuery(command).Wait();
    Console.WriteLine(" TABLE CREATED SUCCESSFULLY.... ");

    Console.WriteLine("LOADING DATA FROM ....." + crimedatafile + " into the '" + hivetablename + "' table in hive ...");
    // Load the uploaded CSV from blob storage into the table.
    command = "LOAD data inpath 'wasb://";
    command += asvcontainer + "@" + asvaccount + asvstudentdirectory + crimedatafile + "' OVERWRITE INTO TABLE " + hivetablename + ";";
    Console.WriteLine(command);
    hiveconnection.ExecuteHiveQuery(command).Wait();

    Console.WriteLine("Performing Hive Query .... ");
    // Query the newly created table and print everything it returns.
    command = "SELECT * FROM " + hivetablename + ";";
    Console.WriteLine(command);
    var result = hiveconnection.ExecuteQuery(command);

    // Show the output.
    Console.WriteLine("THE RESULTS ARE : {0}", result.Result.ReadToEnd());
    Console.WriteLine("\n press any key to continue. ");
    Console.ReadKey();
}