static void Main(string[] args)
{
    // SECURITY(review): storage account credentials are hard-coded in source.
    // Move them to configuration / a secrets store and rotate the exposed key.
    var blobAdapter = new BlobStorageAdapter(
        "joydivision",
        "RMfUv/odmgsSdOcLqYfCLUKDIk9QI556Ly+3kxH8M6YgKcXwKMaoHLd6di201F/v1HVxHOjSMBJKPj1DXNSBQw==",
        "joydivision",
        true);
    var webHdfsClient = new WebHDFSClient("hadoop", blobAdapter);

    // Upload the Oozie workflow definition to the app path so the job can be submitted.
    string source = AppDomain.CurrentDomain.BaseDirectory + @"\Workflow.xml";
    string target = "/handsonlabs/app/Workflow.xml".Replace('\\', '/');
    Console.WriteLine("Uploading to " + target);
    var createdFile = webHdfsClient.CreateFile(source, target);
    createdFile.Wait();

    var oozieClient = new OozieHttpClient(AzureCluster, AzureUserName, AzurePassword);

    // Unique output folder per run so repeated submissions do not collide.
    string outputPath = AzureStorageVaultRoot + "/handsonlabs/output/oozieJob/" + Guid.NewGuid().ToString("N");
    var oozieJobProperties = new OozieJobProperties(
        AzureUserName,
        AzureStorageVaultRoot,
        "headnodehost:9010",
        AzureStorageVaultRoot + "/handsonlabs/app/Workflow.xml",
        "",
        outputPath);

    var submitJob = oozieClient.SubmitJob(oozieJobProperties.ToDictionary());
    string id = HttpClientTools.GetTaskResults(submitJob).Value<string>("id");

    // BUG FIX: the task returned by StartJob was previously dropped, so the job
    // could remain un-started and any failure was unobserved. Wait for it here.
    // (Sibling calls SubmitJob/GetJobInfo return Task, so StartJob is assumed
    // Task-returning as well — confirm against OozieHttpClient.)
    var startJob = oozieClient.StartJob(id);
    submitJob.Wait();
    startJob.Wait();

    // Check job status.
    Task<HttpResponseMessage> t1 = oozieClient.GetJobInfo(id);
    var res = HttpClientTools.GetTaskResults(t1);
}
/// <summary>
/// Uploads a local file to HDFS, recreating the destination directory first.
/// Errors are printed to the console rather than rethrown.
/// </summary>
/// <param name="srcFileName">Local path of the file to upload.</param>
/// <param name="destFolderName">HDFS directory to (re)create and upload into.</param>
/// <param name="destFileName">File name to use inside the HDFS directory.</param>
/// <param name="uri">WebHDFS endpoint of the Hadoop cluster.</param>
public static void SaveFile(string srcFileName, string destFolderName, string destFileName, string uri)
{
    try
    {
        // Connect to the Hadoop cluster over WebHDFS.
        Uri myUri = new Uri(uri);
        string userName = "******";
        WebHDFSClient myClient = new WebHDFSClient(myUri, userName);

        // BUG FIX: the delete/create tasks were fired without being awaited, so the
        // directory could be recreated before the delete finished (or the upload could
        // start before the directory existed). Wait for each step to complete.
        // Drop destination directory (if it exists), then recreate it.
        myClient.DeleteDirectory(destFolderName, true).Wait();
        myClient.CreateDirectory(destFolderName).Wait();

        string newpathfile = destFolderName + "/" + destFileName;

        // Upload the local file into the destination directory.
        var s = myClient.CreateFile(srcFileName, newpathfile);
        Console.WriteLine(s.Result);

        // Keep the command window open until the user presses enter.
        Console.ReadLine();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
    }
}
static void Main(string[] args)
{
    // Resolve the local file and its HDFS target path (local name mirrored under /tmp).
    string localPath = ReadFilename();
    string remotePath = "/tmp/" + Path.GetFileName(localPath) + ".dotnet";

    // Build the client from the interactively-read endpoint and credentials.
    var credentials = new NetworkCredential(ReadUsername(), ReadPassword());
    var client = new WebHDFSClient(ReadWebHDFSAPI(), credentials);

    // Time the upload end to end.
    var timer = Stopwatch.StartNew();
    Console.WriteLine("Starting to upload file - " + localPath);

    var uploadResult = client.UploadFile(localPath, remotePath, overwrite: true).Result;
    Console.WriteLine("Upload successful?: " + uploadResult);
    Console.WriteLine("Finished uploading file - " + localPath);

    timer.Stop();
    Console.WriteLine("Time Elapsed: " + timer.Elapsed);

    // Report what HDFS now knows about the uploaded file.
    Console.WriteLine("File info: " + client.GetFileStatus(remotePath).Result);
}
/// <summary>
/// Prints the names of the files inside an HDFS directory to the console.
/// Errors are printed to the console rather than rethrown.
/// </summary>
/// <param name="destFolderName">HDFS directory to list.</param>
/// <param name="uri">WebHDFS endpoint of the Hadoop cluster.</param>
public static void GetDirectoryStatus(string destFolderName, string uri)
{
    try
    {
        // Connect to the Hadoop cluster over WebHDFS.
        Uri myUri = new Uri(uri);
        string userName = "******";
        WebHDFSClient myClient = new WebHDFSClient(myUri, userName);

        // List file contents of the destination directory.
        Console.WriteLine();
        Console.WriteLine("Contents of " + destFolderName);

        // BUG FIX: the listing previously ran in an un-awaited ContinueWith, so it
        // raced the ReadLine below and any fault went unobserved. Block for the
        // result instead (a failure surfaces as AggregateException and is caught below).
        var status = myClient.GetDirectoryStatus(destFolderName).Result;
        foreach (var f in status.Files)
        {
            Console.WriteLine("- " + f.PathSuffix);
        }

        // Keep the command window open until the user presses enter.
        Console.ReadLine();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
    }
}
/// <summary>
/// Resolves the component's first runtime connection into a WebHDFSClient and
/// caches it in the <c>client</c> field. Leaves <c>client</c> untouched when no
/// connection is configured or the manager is not an HDFS connection manager.
/// </summary>
public override void AcquireConnections(object transaction)
{
    var runtimeConnection = ComponentMetaData.RuntimeConnectionCollection[0];
    if (runtimeConnection == null)
    {
        return;
    }

    // Unwrap the SSIS runtime connection manager to reach the inner HDFS manager.
    ConnectionManager cm =
        Microsoft.SqlServer.Dts.Runtime.DtsConvert.GetWrapper(runtimeConnection.ConnectionManager);
    HDFSConnectionManager.HDFSConnectionManager connManager =
        cm.InnerObject as HDFSConnectionManager.HDFSConnectionManager;
    if (connManager == null)
    {
        return;
    }

    client = connManager.AcquireConnection(transaction) as WebHDFSClient;
}
/// <summary>
/// Groups sensor messages by delivery, serializes each delivery, and writes one
/// HDFS file per delivery under <c>DirectoryPath</c>.
/// </summary>
/// <param name="sensorMessages">Messages to map; grouped by their DeliveryId.</param>
public void Map(IEnumerable<Message> sensorMessages)
{
    var groupedMessages = sensorMessages.GroupBy(msg => msg.DeliveryId);

    // BUG FIX: previously a new WebHDFSClient was created for every group and each
    // CreateFile task was fired and forgotten, so uploads could be lost and failures
    // went unobserved (the stream was also disposed by an unobserved continuation).
    // Reuse one client and wait for each upload before disposing its stream.
    var client = new WebHDFSClient(new Uri(@"http://127.0.0.1:50070/"), "Camper");

    foreach (var group in groupedMessages)
    {
        var delivery = group.MapToDelivery();
        var remotePath = string.Format("{0}/{1}_{2}", DirectoryPath, delivery.Vehicle.VehicleId, group.Key);

        using (var memStream = new MemoryStream(SerializationHelper.Serialize(delivery)))
        {
            client.CreateFile(memStream, remotePath).Wait();
        }
    }
}
/// <summary>
/// Builds a WebHDFSClient from the manager's ConnectionString and UserName.
/// </summary>
/// <param name="txn">SSIS transaction object (unused here).</param>
/// <returns>A new <c>WebHDFSClient</c> for the configured endpoint.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when ConnectionString is not a valid URI.
/// </exception>
public override object AcquireConnection(object txn)
{
    //Debugger.Launch();
    try
    {
        Uri connectionUri = new Uri(ConnectionString);
        WebHDFSClient client = new WebHDFSClient(connectionUri, UserName);
        return client;
    }
    catch (UriFormatException ex)
    {
        // BUG FIX: the original threw a bare Exception and discarded the parse
        // failure. Throw a specific type (still caught by `catch (Exception)`)
        // and keep the UriFormatException as the inner exception for diagnosis.
        throw new InvalidOperationException(
            "HDFS Connection Manager - Invalid Connection String, check Host and Port", ex);
    }
}
/// <summary>
/// Builds a WebHDFSClient from the manager's ConnectionString and UserName.
/// </summary>
/// <param name="txn">SSIS transaction object (unused here).</param>
/// <returns>A new <c>WebHDFSClient</c> for the configured endpoint.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when ConnectionString is not a valid URI.
/// </exception>
public override object AcquireConnection(object txn)
{
    //Debugger.Launch();
    try
    {
        Uri connectionUri = new Uri(ConnectionString);
        WebHDFSClient client = new WebHDFSClient(connectionUri, UserName);
        return client;
    }
    catch (UriFormatException ex)
    {
        // BUG FIX: the original threw a bare Exception and discarded the parse
        // failure. Throw a specific type (still caught by `catch (Exception)`)
        // and keep the UriFormatException as the inner exception for diagnosis.
        throw new InvalidOperationException(
            "HDFS Connection Manager - Invalid Connection String, check Host and Port", ex);
    }
}
/// <summary>
/// Spins up an in-process TestServer hosting the mock WebHDFS endpoint and wires
/// a WebHDFSClient to it through a basic-auth message handler.
/// </summary>
public MockIntegrationTests(ITestOutputHelper output)
{
    this.output = output;

    // Host the mock endpoint in-process.
    var builder = new WebHostBuilder()
        .Configure(Configure)
        .ConfigureServices(ConfigureServices);
    _server = new TestServer(builder);

    // Basic-auth header derived from the test username/password pair.
    string rawCredentials = string.Format("{0}:{1}", username, password);
    string encodedCredentials = Convert.ToBase64String(Encoding.UTF8.GetBytes(rawCredentials));
    var authenticationHeaderValue = new AuthenticationHeaderValue("Basic", encodedCredentials);

    // Route the client's HTTP traffic through the in-process server with auth attached.
    var handler = new AuthenticatedHttpMessageHandler(authenticationHeaderValue, _server.CreateHandler());
    _webhdfs = new WebHDFSClient(_server.BaseAddress.AbsoluteUri, CustomHttpMessageHandler: handler);
}
/// <summary>
/// Lists the contents of an HDFS directory and then opens and prints the file
/// "4300.txt" inside it.
/// </summary>
/// <param name="myUri">WebHDFS endpoint of the Hadoop cluster.</param>
/// <param name="destFolderName">HDFS directory to list and read from.</param>
public static void Copy(Uri myUri, string destFolderName)
{
    WebHDFSClient myClient = new WebHDFSClient(myUri, "hadoop");

    // BUG FIX: the listing and the file read previously ran in fire-and-forget
    // GetAwaiter().OnCompleted callbacks, so the method could return before either
    // finished and any fault went unobserved. Block for each result instead.
    // (Dead commented-out upload code was removed.)
    var status = myClient.GetDirectoryStatus(destFolderName).Result;
    foreach (var file in status.Files)
    {
        Console.WriteLine(file.PathSuffix);
    }
    Console.Read();

    var openedFile = myClient.OpenFile(destFolderName + "/4300.txt").Result;
    Console.WriteLine(openedFile.ToString());
}
/// <summary>
/// Downloads a file from HDFS into a local folder.
/// Errors are printed to the console rather than rethrown.
/// </summary>
/// <param name="folderName">HDFS directory containing the file.</param>
/// <param name="fileName">Name of the file to download.</param>
/// <param name="uri">WebHDFS endpoint of the Hadoop cluster.</param>
/// <param name="localfolder">Local directory to write the file into.</param>
public static void GetFile(string folderName, string fileName, string uri, string localfolder)
{
    try
    {
        // Connect to the Hadoop cluster over WebHDFS.
        Uri myUri = new Uri(uri);
        string userName = "******";
        WebHDFSClient myClient = new WebHDFSClient(myUri, userName);

        string newpathfile = folderName + "/" + fileName;
        Console.WriteLine();
        Console.WriteLine("Get of file on " + newpathfile);

        var ss = myClient.OpenFile(newpathfile);

        string localFilePath = localfolder + "\\" + fileName;

        // BUG FIX: the output stream was never disposed (file-handle leak) and the
        // copy ran in an un-awaited ContinueWith, so "Download succeeded." could
        // print before — or without — the file actually being written. Copy
        // synchronously and let 'using' close the handle.
        using (Stream output = File.OpenWrite(localFilePath))
        {
            var stream = ss.Result.Content.ReadAsStreamAsync().Result;
            stream.CopyTo(output);
        }

        Console.WriteLine("Download succeeded.");
        Console.ReadLine();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
        Console.ReadLine();
    }
}
/// <summary>
/// Prints the server-side checksum of an HDFS file to the console.
/// Errors are printed to the console rather than rethrown.
/// </summary>
/// <param name="filepath">HDFS path of the file to checksum.</param>
/// <param name="uri">WebHDFS endpoint of the Hadoop cluster.</param>
public static void Checksum(string filepath, string uri)
{
    try
    {
        // Connect to the Hadoop cluster over WebHDFS.
        var clusterUri = new Uri(uri);
        var userName = "******";
        var client = new WebHDFSClient(clusterUri, userName);

        // Fetch the checksum and print its string form.
        var checksumTask = client.GetFileChecksum(filepath);
        Console.WriteLine(checksumTask.Result.Checksum);

        // Keep the command window open until the user presses enter.
        Console.ReadLine();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
    }
}
/// <summary>
/// Serializes the messages (ordered by vehicle, then timestamp) and uploads them
/// to HDFS in files of at most 50,000 lines under <c>DirectoryPath</c>.
/// </summary>
/// <param name="messages">Sensor messages to persist.</param>
public void Write(IEnumerable<Message> messages)
{
    // Stable ordering so each chunk file holds a contiguous, chronologically
    // sorted slice of a vehicle's readings.
    var sensorData = messages.OrderBy(msg => msg.VehicleId)
                             .ThenBy(msg => msg.Timestamp)
                             .Select(msg => msg.Serialize())
                             .ToArray();

    const int chunkSize = 50000;
    var maxIndex = (int)(Math.Ceiling(sensorData.Length / (double)chunkSize));

    var client = new WebHDFSClient(new Uri(@"http://127.0.0.1:50070/"), "Camper");

    for (int index = 0; index < maxIndex; index++)
    {
        var chunk = sensorData.Skip(index * chunkSize).Take(chunkSize);
        var content = string.Join("\n", chunk);

        // BUG FIX: the MemoryStream was never disposed; scope it with 'using' and
        // wait for the upload to finish before it is released.
        using (var memStream = new MemoryStream(Encoding.UTF8.GetBytes(content)))
        {
            string remoteFile = string.Format("{0}/SensorData_{1}", DirectoryPath, index + 1);
            client.CreateFile(memStream, remoteFile).Wait();
        }
    }
}
static void Main(string[] args)
{
    // SECURITY(review): storage account credentials are hard-coded in source.
    // Move them to configuration / a secrets store and rotate the exposed key.
    var blobAdapter = new BlobStorageAdapter(
        "joydivision",
        "RMfUv/odmgsSdOcLqYfCLUKDIk9QI556Ly+3kxH8M6YgKcXwKMaoHLd6di201F/v1HVxHOjSMBJKPj1DXNSBQw==",
        "joydivision",
        true);
    var webHdfsClient = new WebHDFSClient("hadoop", blobAdapter);

    // Upload the Oozie workflow definition to the app path so the job can be submitted.
    string source = AppDomain.CurrentDomain.BaseDirectory + @"\Workflow.xml";
    string target = "/handsonlabs/app/Workflow.xml".Replace('\\', '/');
    Console.WriteLine("Uploading to " + target);
    var createdFile = webHdfsClient.CreateFile(source, target);
    createdFile.Wait();

    var oozieClient = new OozieHttpClient(AzureCluster, AzureUserName, AzurePassword);

    // Unique output folder per run so repeated submissions do not collide.
    string outputPath = AzureStorageVaultRoot + "/handsonlabs/output/oozieJob/" + Guid.NewGuid().ToString("N");
    var oozieJobProperties = new OozieJobProperties(
        AzureUserName,
        AzureStorageVaultRoot,
        "headnodehost:9010",
        AzureStorageVaultRoot + "/handsonlabs/app/Workflow.xml",
        "",
        outputPath);

    var submitJob = oozieClient.SubmitJob(oozieJobProperties.ToDictionary());
    string id = HttpClientTools.GetTaskResults(submitJob).Value<string>("id");

    // BUG FIX: the task returned by StartJob was previously dropped, so the job
    // could remain un-started and any failure was unobserved. Wait for it here.
    // (Sibling calls SubmitJob/GetJobInfo return Task, so StartJob is assumed
    // Task-returning as well — confirm against OozieHttpClient.)
    var startJob = oozieClient.StartJob(id);
    submitJob.Wait();
    startJob.Wait();

    // Check job status.
    Task<HttpResponseMessage> t1 = oozieClient.GetJobInfo(id);
    var res = HttpClientTools.GetTaskResults(t1);
}
/// <summary>
/// Serializes the messages (ordered by vehicle, then timestamp) and uploads them
/// to HDFS in files of at most 50,000 lines under <c>DirectoryPath</c>.
/// </summary>
/// <param name="messages">Sensor messages to persist.</param>
public void Write(IEnumerable<Message> messages)
{
    // Stable ordering so each chunk file holds a contiguous, chronologically
    // sorted slice of a vehicle's readings.
    var sensorData = messages.OrderBy(msg => msg.VehicleId)
                             .ThenBy(msg => msg.Timestamp)
                             .Select(msg => msg.Serialize())
                             .ToArray();

    const int chunkSize = 50000;
    var maxIndex = (int)(Math.Ceiling(sensorData.Length / (double)chunkSize));

    var client = new WebHDFSClient(new Uri(@"http://127.0.0.1:50070/"), "Camper");

    for (int index = 0; index < maxIndex; index++)
    {
        var chunk = sensorData.Skip(index * chunkSize).Take(chunkSize);
        var content = string.Join("\n", chunk);

        // BUG FIX: the MemoryStream was never disposed; scope it with 'using' and
        // wait for the upload to finish before it is released.
        using (var memStream = new MemoryStream(Encoding.UTF8.GetBytes(content)))
        {
            string remoteFile = string.Format("{0}/SensorData_{1}", DirectoryPath, index + 1);
            client.CreateFile(memStream, remoteFile).Wait();
        }
    }
}
// Drops the cached WebHDFS client reference when the runtime releases connections.
// No teardown is performed on the client itself here.
public override void ReleaseConnections() { this.client = null; }
/// <summary>
/// Creates the accessor by building a WebHDFSClient for the given endpoint and user.
/// </summary>
/// <param name="uriString">WebHDFS endpoint URI as a string.</param>
/// <param name="userName">User name to connect as.</param>
public HDFSAccess(string uriString, string userName)
{
    var endpoint = new Uri(uriString);
    this.webHDFSClient = new WebHDFSClient(endpoint, userName);
}