/// <exception cref="System.IO.IOException"/>
private void AddToLocalResources(FileSystem fs, string fileSrcPath, string fileDstPath,
                                 string appId, IDictionary<string, LocalResource> localResources,
                                 string resources)
{
    string suffix = appName + "/" + appId + "/" + fileDstPath;
    Path dst = new Path(fs.GetHomeDirectory(), suffix);
    if (fileSrcPath == null)
    {
        FSDataOutputStream ostream = null;
        try
        {
            ostream = FileSystem.Create(fs, dst, new FsPermission((short)0x1c8));
            ostream.WriteUTF(resources);
        }
        finally
        {
            IOUtils.CloseQuietly(ostream);
        }
    }
    else
    {
        fs.CopyFromLocalFile(new Path(fileSrcPath), dst);
    }
    FileStatus scFileStatus = fs.GetFileStatus(dst);
    LocalResource scRsrc = LocalResource.NewInstance(
        ConverterUtils.GetYarnUrlFromURI(dst.ToUri()),
        LocalResourceType.File, LocalResourceVisibility.Application,
        scFileStatus.GetLen(), scFileStatus.GetModificationTime());
    localResources[fileDstPath] = scRsrc;
}
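// Hedged usage sketch (all paths, names, and argument values below are illustrative, not
// taken from the original client): localize a script file from the local filesystem, then
// write an in-memory argument string out to HDFS as a second localized resource.
private void SetupLocalResourcesExample(FileSystem fs, string appId,
                                        IDictionary<string, LocalResource> localResources)
{
    AddToLocalResources(fs, "/tmp/run.sh", "run.sh", appId, localResources, null);
    AddToLocalResources(fs, null, "shellArgs", appId, localResources, "arg1 arg2");
}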
public virtual DomainSocket CreateSocket(DomainSocketFactory.PathInfo info, int socketTimeout)
{
    Preconditions.CheckArgument(info.GetPathState() != DomainSocketFactory.PathState.Unusable);
    bool success = false;
    DomainSocket sock = null;
    try
    {
        sock = DomainSocket.Connect(info.GetPath());
        sock.SetAttribute(DomainSocket.ReceiveTimeout, socketTimeout);
        success = true;
    }
    catch (IOException e)
    {
        Log.Warn("error creating DomainSocket", e);
    }
    finally
    {
        // fall through
        if (!success)
        {
            if (sock != null)
            {
                IOUtils.CloseQuietly(sock);
            }
            pathMap.Put(info.GetPath(), DomainSocketFactory.PathState.Unusable);
            sock = null;
        }
    }
    return(sock);
}
/// <exception cref="System.IO.IOException"/>
protected internal void MakeTestFile(Path path, long length, bool isLazyPersist)
{
    EnumSet<CreateFlag> createFlags = EnumSet.Of(CreateFlag.Create);
    if (isLazyPersist)
    {
        createFlags.AddItem(CreateFlag.LazyPersist);
    }
    FSDataOutputStream fos = null;
    try
    {
        fos = fs.Create(path, FsPermission.GetFileDefault(), createFlags, BufferLength,
                        ReplFactor, BlockSize, null);
        // Allocate a block.
        byte[] buffer = new byte[BufferLength];
        for (int bytesWritten = 0; bytesWritten < length;)
        {
            fos.Write(buffer, 0, buffer.Length);
            bytesWritten += buffer.Length;
        }
        if (length > 0)
        {
            fos.Hsync();
        }
    }
    finally
    {
        IOUtils.CloseQuietly(fos);
    }
}
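// Hedged usage sketch (the path is illustrative): create a single-block file flagged
// LAZY_PERSIST so its replica lands on RAM_DISK, mirroring the calls made by the
// eviction test further below.
private void MakeLazyPersistFileExample()
{
    Path path = new Path("/" + GenericTestUtils.GetMethodName() + ".dat");
    MakeTestFile(path, BlockSize, true);  // one block, lazy-persist enabled
    EnsureFileReplicasOnStorageType(path, StorageType.RamDisk);
}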
public static Org.Apache.Hadoop.Hdfs.Server.Datanode.Fsdataset.Impl.MappableBlock Load(
    long length, FileInputStream blockIn, FileInputStream metaIn, string blockFileName)
{
    Org.Apache.Hadoop.Hdfs.Server.Datanode.Fsdataset.Impl.MappableBlock mappableBlock = null;
    MappedByteBuffer mmap = null;
    FileChannel blockChannel = null;
    try
    {
        blockChannel = blockIn.GetChannel();
        if (blockChannel == null)
        {
            throw new IOException("Block InputStream has no FileChannel.");
        }
        mmap = blockChannel.Map(FileChannel.MapMode.ReadOnly, 0, length);
        NativeIO.POSIX.GetCacheManipulator().Mlock(blockFileName, mmap, length);
        VerifyChecksum(length, metaIn, blockChannel, blockFileName);
        mappableBlock = new Org.Apache.Hadoop.Hdfs.Server.Datanode.Fsdataset.Impl.MappableBlock(mmap, length);
    }
    finally
    {
        IOUtils.CloseQuietly(blockChannel);
        if (mappableBlock == null)
        {
            if (mmap != null)
            {
                // unmapping also unlocks
                NativeIO.POSIX.Munmap(mmap);
            }
        }
    }
    return(mappableBlock);
}
public override void Run()
{
    while (keepRunning)
    {
        OutputStream os = null;
        try
        {
            string filename = "/file-" + rand.NextLong();
            os = localClient.Create(filename, false);
            os.Write(data, 0, rand.Next(data.Length));
            IOUtils.CloseQuietly(os);
            os = null;
            localClient.Delete(filename, false);
            Sharpen.Thread.Sleep(50);  // Sleep for a bit to avoid killing the system.
            ++filesCreated;
        }
        catch (IOException)
        {
            // Just ignore the exception and keep going.
            ++numFailures;
        }
        catch (Exception)
        {
            return;
        }
        finally
        {
            if (os != null)
            {
                IOUtils.CloseQuietly(os);
            }
        }
    }
}
/// <exception cref="System.Exception"/>
public static void UpdateLog4jConfiguration(Type targetClass, string log4jPath)
{
    Properties customProperties = new Properties();
    FileInputStream fs = null;
    InputStream @is = null;
    try
    {
        fs = new FileInputStream(log4jPath);
        @is = targetClass.GetResourceAsStream("/log4j.properties");
        customProperties.Load(fs);
        Properties originalProperties = new Properties();
        originalProperties.Load(@is);
        foreach (KeyValuePair<object, object> entry in customProperties)
        {
            originalProperties.SetProperty(entry.Key.ToString(), entry.Value.ToString());
        }
        LogManager.ResetConfiguration();
        PropertyConfigurator.Configure(originalProperties);
    }
    finally
    {
        IOUtils.CloseQuietly(@is);
        IOUtils.CloseQuietly(fs);
    }
}
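// Hedged usage sketch (the target type and file path are placeholders): overlay a custom
// log4j.properties file on top of the defaults bundled alongside a class.
private static void ReconfigureLoggingExample()
{
    UpdateLog4jConfiguration(typeof(ApplicationMaster), "/etc/myapp/log4j.properties");
}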
/* @SuppressWarnings("unchecked") */
private static IList<String> ReadSampleCodes()
{
    Stream input = typeof(PerformanceTester).Assembly.GetFile("sampleCodes.txt");
    try
    {
        return(IOUtils.ReadLines(input));
    }
    finally
    {
        IOUtils.CloseQuietly(input);
    }
}
private String Read(Uri url)
{
    Stream input = WebRequest.Create(url).GetResponse().GetResponseStream();
    try
    {
        return(IOUtils.ToString(input));
    }
    finally
    {
        IOUtils.CloseQuietly(input);
    }
}
public virtual void Shutdown()
{
    lock (this)
    {
        if (!enabled)
        {
            return;
        }
        enabled = false;
    }
    IOUtils.CloseQuietly(watcher);
}
/// <summary>Verifies the block's checksum.</summary>
/// <remarks>Verifies the block's checksum. This is an I/O intensive operation.</remarks>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="Org.Apache.Hadoop.FS.ChecksumException"/>
private static void VerifyChecksum(long length, FileInputStream metaIn, FileChannel blockChannel,
                                   string blockFileName)
{
    // Verify the checksum from the block's meta file.
    // Get the DataChecksum from the meta file header.
    BlockMetadataHeader header = BlockMetadataHeader.ReadHeader(
        new DataInputStream(new BufferedInputStream(metaIn, BlockMetadataHeader.GetHeaderSize())));
    FileChannel metaChannel = null;
    try
    {
        metaChannel = metaIn.GetChannel();
        if (metaChannel == null)
        {
            throw new IOException("Block InputStream meta file has no FileChannel.");
        }
        DataChecksum checksum = header.GetChecksum();
        int bytesPerChecksum = checksum.GetBytesPerChecksum();
        int checksumSize = checksum.GetChecksumSize();
        int numChunks = (8 * 1024 * 1024) / bytesPerChecksum;
        ByteBuffer blockBuf = ByteBuffer.Allocate(numChunks * bytesPerChecksum);
        ByteBuffer checksumBuf = ByteBuffer.Allocate(numChunks * checksumSize);
        // Verify the checksum
        int bytesVerified = 0;
        while (bytesVerified < length)
        {
            Preconditions.CheckState(bytesVerified % bytesPerChecksum == 0,
                "Unexpected partial chunk before EOF");
            System.Diagnostics.Debug.Assert(bytesVerified % bytesPerChecksum == 0);
            int bytesRead = FillBuffer(blockChannel, blockBuf);
            if (bytesRead == -1)
            {
                throw new IOException("checksum verification failed: premature EOF");
            }
            blockBuf.Flip();
            // Number of read chunks, including partial chunk at end
            int chunks = (bytesRead + bytesPerChecksum - 1) / bytesPerChecksum;
            checksumBuf.Limit(chunks * checksumSize);
            FillBuffer(metaChannel, checksumBuf);
            checksumBuf.Flip();
            checksum.VerifyChunkedSums(blockBuf, checksumBuf, blockFileName, bytesVerified);
            // Success
            bytesVerified += bytesRead;
            blockBuf.Clear();
            checksumBuf.Clear();
        }
    }
    finally
    {
        IOUtils.CloseQuietly(metaChannel);
    }
}
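// Illustrative sizing only (the 512-byte chunk and 4-byte checksum are assumed defaults,
// not values read from this code): the 8 MB constant above bounds how much block data is
// verified per pass, and the checksum buffer is scaled to match.
int bytesPerChecksumExample = 512;
int checksumSizeExample = 4;                                          // e.g. a 4-byte CRC
int numChunksExample = (8 * 1024 * 1024) / bytesPerChecksumExample;   // 16384 chunks per pass
int blockBufBytesExample = numChunksExample * bytesPerChecksumExample;   // 8 MiB of block data
int checksumBufBytesExample = numChunksExample * checksumSizeExample;    // 64 KiB of checksums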
/// <exception cref="System.Exception"></exception>
private MessageSet GetMessageSet(string xml)
{
    Stream input = Ca.Infoway.Messagebuilder.Platform.ResourceLoader.GetResource(GetType(), xml);
    try
    {
        return(this.marshaller.Unmarshall(input));
    }
    finally
    {
        IOUtils.CloseQuietly(input);
    }
}
private byte[] LoadResponse(string doc)
{
    var input = Platform.ResourceLoader.GetResource(GetType(), doc);
    try
    {
        return(ReadFully(input));
    }
    finally
    {
        IOUtils.CloseQuietly(input);
    }
}
public XmlDocument CreateFromResource(InputStreamResource resource)
{
    XmlDocument document = null;
    Stream input = resource.InputStream;
    try
    {
        document = (input == null) ? null : CreateDocumentBuilder().Parse(XmlReader.Create(input));
    }
    finally
    {
        IOUtils.CloseQuietly(input);
    }
    return(document);
}
public void Close()
{
    // some cases require this close here
    if (toClose != null)
    {
        IOUtils.CloseQuietly(toClose);
        toClose = null;
    }
    if (toCloseStream != null)
    {
        IOUtils.CloseQuietly(toCloseStream);
        toCloseStream = null;
    }
}
/// <exception cref="System.Exception"/>
public Void Call()
{
    string token = string.Empty;
    string owner = string.Empty;
    string renewer = "renewer";
    string body = "{\"renewer\":\"" + renewer + "\"}";
    Uri url = new Uri("http://localhost:8088/ws/v1/cluster/delegation-token?doAs=client2");
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    Org.Apache.Hadoop.Yarn.Server.Resourcemanager.Webapp.TestRMWebServicesDelegationTokenAuthentication
        .SetupConn(conn, "POST", MediaType.ApplicationJson, body);
    InputStream response = conn.GetInputStream();
    NUnit.Framework.Assert.AreEqual(ClientResponse.Status.Ok.GetStatusCode(), conn.GetResponseCode());
    BufferedReader reader = null;
    try
    {
        reader = new BufferedReader(new InputStreamReader(response, "UTF8"));
        for (string line; (line = reader.ReadLine()) != null;)
        {
            JSONObject obj = new JSONObject(line);
            if (obj.Has("token"))
            {
                token = obj.GetString("token");
            }
            if (obj.Has("owner"))
            {
                owner = obj.GetString("owner");
            }
        }
    }
    finally
    {
        IOUtils.CloseQuietly(reader);
        IOUtils.CloseQuietly(response);
    }
    NUnit.Framework.Assert.AreEqual("client2", owner);
    Org.Apache.Hadoop.Security.Token.Token<RMDelegationTokenIdentifier> realToken =
        new Org.Apache.Hadoop.Security.Token.Token<RMDelegationTokenIdentifier>();
    realToken.DecodeFromUrlString(token);
    NUnit.Framework.Assert.AreEqual("client2", realToken.DecodeIdentifier().GetOwner().ToString());
    return(null);
}
/// <summary>Handle a DFSClient request to create a new memory segment.</summary>
/// <param name="clientName">Client name as reported by the client.</param>
/// <param name="sock">
/// The DomainSocket to associate with this memory
/// segment. When this socket is closed, or the
/// other side writes anything to the socket, the
/// segment will be closed. This can happen at any
/// time, including right after this function returns.
/// </param>
/// <returns>
/// A NewShmInfo object. The caller must close the
/// NewShmInfo object once they are done with it.
/// </returns>
/// <exception cref="System.IO.IOException">If the new memory segment could not be created.</exception>
public virtual ShortCircuitRegistry.NewShmInfo CreateNewMemorySegment(string clientName,
                                                                      DomainSocket sock)
{
    ShortCircuitRegistry.NewShmInfo info = null;
    ShortCircuitRegistry.RegisteredShm shm = null;
    ShortCircuitShm.ShmId shmId = null;
    lock (this)
    {
        if (!enabled)
        {
            if (Log.IsTraceEnabled())
            {
                Log.Trace("createNewMemorySegment: ShortCircuitRegistry is not enabled.");
            }
            throw new NotSupportedException();
        }
        FileInputStream fis = null;
        try
        {
            do
            {
                shmId = ShortCircuitShm.ShmId.CreateRandom();
            }
            while (segments.Contains(shmId));
            fis = shmFactory.CreateDescriptor(clientName, ShmLength);
            shm = new ShortCircuitRegistry.RegisteredShm(clientName, shmId, fis, this);
        }
        finally
        {
            if (shm == null)
            {
                IOUtils.CloseQuietly(fis);
            }
        }
        info = new ShortCircuitRegistry.NewShmInfo(shmId, fis);
        segments[shmId] = shm;
    }
    // Drop the registry lock to prevent deadlock.
    // After this point, RegisteredShm#handle may be called at any time.
    watcher.Add(sock, shm);
    if (Log.IsTraceEnabled())
    {
        Log.Trace("createNewMemorySegment: created " + info.shmId);
    }
    return(info);
}
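// Hedged caller sketch (the registry, clientName, and peerSocket variables are illustrative,
// and closing via IOUtils assumes NewShmInfo is closeable, which the doc comment above implies
// by requiring the caller to close it once done).
ShortCircuitRegistry.NewShmInfo shmInfo = null;
try
{
    shmInfo = registry.CreateNewMemorySegment(clientName, peerSocket);
    // ... hand the segment's file descriptor back to the DFSClient here ...
}
finally
{
    IOUtils.CloseQuietly(shmInfo);
}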
/// <summary>
/// Returns the content charset specified explicitly in the header or in the content,
/// or <tt>null</tt> if none was specified.
/// </summary>
/// <returns>the content charset specified explicitly in the header or in the content, or <tt>null</tt> if none was specified</returns>
public String GetContentCharsetOrNull()
{
    InputStream stream = null;
    try
    {
        stream = GetContentAsStream();
        return(EncodingSniffer.SniffEncoding(ResponseHeaders, stream));
    }
    catch (IOException e)
    {
        LOG.Warn("Error trying to sniff encoding.", e);
        return(null);
    }
    finally
    {
        IOUtils.CloseQuietly(stream);
    }
}
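// Hedged usage sketch (the 'response' variable and fallback charset are illustrative):
// fall back to a default when neither the headers nor the content declare a charset.
String charset = response.GetContentCharsetOrNull() ?? "ISO-8859-1";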
public virtual void TestRamDiskEvictionWithShortCircuitReadHandle()
{
    StartUpCluster(ReplFactor, new StorageType[] { StorageType.RamDisk, StorageType.Default },
                   (6 * BlockSize - 1), true);  // 5 replica + delta, SCR.
    string MethodName = GenericTestUtils.GetMethodName();
    Path path1 = new Path("/" + MethodName + ".01.dat");
    Path path2 = new Path("/" + MethodName + ".02.dat");
    int Seed = unchecked((int)(0xFADED));
    MakeRandomTestFile(path1, BlockSize, true, Seed);
    EnsureFileReplicasOnStorageType(path1, StorageType.RamDisk);
    // Sleep for a short time to allow the lazy writer thread to do its job.
    // However, the block replica should not be evicted from RAM_DISK yet.
    Sharpen.Thread.Sleep(3 * LazyWriterIntervalSec * 1000);
    // No eviction should happen as the free ratio is below the threshold.
    FSDataInputStream fis = fs.Open(path1);
    try
    {
        // Keep an open read handle to path1 while creating path2.
        byte[] buf = new byte[BufferLength];
        fis.Read(0, buf, 0, BufferLength);
        // Create the 2nd file that will trigger RAM_DISK eviction.
        MakeTestFile(path2, BlockSize * 2, true);
        EnsureFileReplicasOnStorageType(path2, StorageType.RamDisk);
        // Ensure path1 is still readable from the open SCR handle.
        fis.Read(fis.GetPos(), buf, 0, BufferLength);
        HdfsDataInputStream dfsis = (HdfsDataInputStream)fis;
        NUnit.Framework.Assert.AreEqual(2 * BufferLength, dfsis.GetReadStatistics().GetTotalBytesRead());
        NUnit.Framework.Assert.AreEqual(2 * BufferLength, dfsis.GetReadStatistics().GetTotalShortCircuitBytesRead());
    }
    finally
    {
        IOUtils.CloseQuietly(fis);
    }
    // After the open handle is closed, path1 should be evicted to DISK.
    TriggerBlockReport();
    EnsureFileReplicasOnStorageType(path1, StorageType.Default);
}
/// <exception cref="System.IO.IOException"/>
public Peer NewConnectedPeer(IPEndPoint addr,
                             Org.Apache.Hadoop.Security.Token.Token<BlockTokenIdentifier> blockToken,
                             DatanodeID datanodeId)
{
    Peer peer = null;
    Socket sock = NetUtils.GetDefaultSocketFactory(fs.GetConf()).CreateSocket();
    try
    {
        sock.Connect(addr, HdfsServerConstants.ReadTimeout);
        sock.ReceiveTimeout = HdfsServerConstants.ReadTimeout;
        peer = TcpPeerServer.PeerFromSocket(sock);
    }
    finally
    {
        if (peer == null)
        {
            IOUtils.CloseQuietly(sock);
        }
    }
    return(peer);
}
/// <summary>
/// Tries to determine the content type.
/// TODO: implement a content type sniffer based on the
/// <a href="http://tools.ietf.org/html/draft-abarth-mime-sniff-05">Content-Type Processing Model</a>
/// @exception IOException if an IO problem occurs
/// </summary>
/// <param name="contentType">the contentType header if any</param>
/// <param name="contentAsStream">stream allowing to read the downloaded content</param>
/// <returns>the sniffed mime type</returns>
protected String DetermineContentType(String contentType, InputStream contentAsStream)
{
    byte[] markerUTF8 = { (byte)0xef, (byte)0xbb, (byte)0xbf };
    byte[] markerUTF16BE = { (byte)0xfe, (byte)0xff };
    byte[] markerUTF16LE = { (byte)0xff, (byte)0xfe };
    try
    {
        if (!String.IsNullOrEmpty(contentType))
        {
            return(contentType);
        }
        byte[] bytes = Read(contentAsStream, 500);
        if (bytes.Length == 0)
        {
            return("text/plain");
        }
        String asAsciiString = Encoding.ASCII.GetString(bytes).ToUpper(); // TODO : Locale.ENGLISH
        if (asAsciiString.Contains("<HTML"))
        {
            return("text/html");
        }
        else if (StartsWith(bytes, markerUTF8) || StartsWith(bytes, markerUTF16BE) ||
                 StartsWith(bytes, markerUTF16LE))
        {
            return("text/plain");
        }
        else if (IsBinary(bytes))
        {
            return("application/octet-stream");
        }
    }
    finally
    {
        IOUtils.CloseQuietly(contentAsStream);
    }
    return("text/plain");
}
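// Hedged usage sketch (reuses the responseData_ field seen in GetContentAsString below):
// with no Content-Type header, the first 500 bytes decide the type: "<HTML" maps to
// text/html, a Unicode BOM to text/plain, and other binary content to application/octet-stream.
String mimeType = DetermineContentType(null, responseData_.GetInputStream());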
/// <summary>
/// Returns the response content as a string, using the specified charset/encoding,
/// rather than the charset/encoding specified in the server response. If the specified
/// charset/encoding is not supported then the default system encoding is used.
/// </summary>
/// <param name="encoding">the charset/encoding to use to convert the response content into a string</param>
/// <returns>the response content as a string</returns>
public String GetContentAsString(String encoding)
{
    InputStream stream = null;
    try
    {
        stream = responseData_.GetInputStream();
        if (null == stream)
        {
            return(null);
        }
        // First verify the charset, because we can't read the input stream twice.
        try
        {
            Encoding.GetEncoding(encoding);
        }
        catch (Exception e)
        {
            String cs = GetContentCharset();
            LOG.Warn("Attempted to use unsupported encoding '" + encoding +
                     "'; using default content charset ('" + cs + "').");
            return(IOUtils.ToString(stream, cs));
        }
        return(IOUtils.ToString(stream, encoding));
    }
    catch (IOException e)
    {
        LOG.Warn(e);
        return(null);
    }
    finally
    {
        IOUtils.CloseQuietly(stream);
    }
}
public virtual void ShutDownCluster()
{
    // Dump all RamDisk JMX metrics before shutting down the cluster.
    PrintRamDiskJMXMetrics();
    if (fs != null)
    {
        fs.Close();
        fs = null;
        client = null;
    }
    if (cluster != null)
    {
        cluster.ShutdownDataNodes();
        cluster.Shutdown();
        cluster = null;
    }
    if (jmx != null)
    {
        jmx = null;
    }
    IOUtils.CloseQuietly(sockDir);
    sockDir = null;
}
/// <exception cref="System.Exception"/>
public string Call()
{
    string ret = null;
    string body = "{\"renewer\":\"" + renewer + "\"}";
    Uri url = new Uri("http://localhost:8088/ws/v1/cluster/delegation-token");
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    Org.Apache.Hadoop.Yarn.Server.Resourcemanager.Webapp.TestRMWebServicesDelegationTokenAuthentication
        .SetupConn(conn, "POST", MediaType.ApplicationJson, body);
    InputStream response = conn.GetInputStream();
    NUnit.Framework.Assert.AreEqual(ClientResponse.Status.Ok.GetStatusCode(), conn.GetResponseCode());
    BufferedReader reader = null;
    try
    {
        reader = new BufferedReader(new InputStreamReader(response, "UTF8"));
        for (string line; (line = reader.ReadLine()) != null;)
        {
            JSONObject obj = new JSONObject(line);
            if (obj.Has("token"))
            {
                reader.Close();
                response.Close();
                ret = obj.GetString("token");
                break;
            }
        }
    }
    finally
    {
        IOUtils.CloseQuietly(reader);
        IOUtils.CloseQuietly(response);
    }
    return(ret);
}
public void Initialize()
{
    System.Console.WriteLine("About to init message service");
    if (!Initialized())
    {
        MessageSetMarshaller marshaller = new MessageSetMarshaller();
        IList<MessageSet> list = new List<MessageSet>();
        /* foreach */
        foreach (var resourcePair in Names)
        {
            System.Console.WriteLine("List of names:" + resourcePair.Name);
        }
        foreach (var resourcePair in Names)
        {
            System.Console.WriteLine(resourcePair.Name + "." + resourcePair.Assembly.FullName);
            Stream input = ResourceLoader.GetResource(resourcePair.Assembly, resourcePair.Name);
            if (input == null)
            {
                System.Console.WriteLine("Input is null!!");
            }
            try
            {
                ILOG.J2CsMapping.Collections.Generics.Collections.Add(list, marshaller.Unmarshall(input));
            }
            catch (Exception e)
            {
                throw new Exception("Could not read " + resourcePair.Name, e);
            }
            finally
            {
                IOUtils.CloseQuietly(input);
            }
        }
        this.messageSets = ILOG.J2CsMapping.Collections.Generics.Collections.UnmodifiableList(list);
    }
}
public virtual void Run()
{
    FsDatasetCache.Value value;
    if (this.ShouldDefer())
    {
        this._enclosing.deferredUncachingExecutor.Schedule(this, this._enclosing.revocationPollingMs,
                                                           TimeUnit.Milliseconds);
        return;
    }
    lock (this._enclosing)
    {
        value = this._enclosing.mappableBlockMap[this.key];
    }
    Preconditions.CheckNotNull(value);
    Preconditions.CheckArgument(value.state == FsDatasetCache.State.Uncaching);
    IOUtils.CloseQuietly(value.mappableBlock);
    lock (this._enclosing)
    {
        Sharpen.Collections.Remove(this._enclosing.mappableBlockMap, this.key);
    }
    long newUsedBytes = this._enclosing.usedBytesCount.Release(value.mappableBlock.GetLength());
    this._enclosing.numBlocksCached.AddAndGet(-1);
    this._enclosing.dataset.datanode.GetMetrics().IncrBlocksUncached(1);
    if (this.revocationTimeMs != 0)
    {
        FsDatasetCache.Log.Debug("Uncaching of {} completed. usedBytes = {}", this.key, newUsedBytes);
    }
    else
    {
        FsDatasetCache.Log.Debug("Deferred uncaching of {} completed. usedBytes = {}", this.key, newUsedBytes);
    }
}
public void TestGetKeys()
{
    var namespace1ElementsNum = 1000;
    var namespace2ElementsNum = 1000;
    var fieldName = "get-keys-test";
    var backend = CreateKeyedBackend(IntSerializer.Instance);
    try
    {
        var ns1 = "ns1";
        var keyedState1 = backend.GetPartitionedState(ns1, StringSerializer.Instance,
            new ValueStateDescriptor<int>(fieldName, IntSerializer.Instance));
        for (var key = 0; key < namespace1ElementsNum; key++)
        {
            backend.CurrentKey = key;
            keyedState1.Value = key * 2;
        }

        var ns2 = "ns2";
        var keyedState2 = backend.GetPartitionedState(ns2, StringSerializer.Instance,
            new ValueStateDescriptor<int>(fieldName, IntSerializer.Instance));
        for (var key = 0; key < namespace2ElementsNum; key++)
        {
            backend.CurrentKey = key;
            keyedState2.Value = key * 2;
        }

        // valid for namespace1
        try
        {
            using var keys = backend.GetKeys(fieldName, ns1).OrderBy(it => it).GetEnumerator();
            for (var expectedKey = 0; expectedKey < namespace1ElementsNum; expectedKey++)
            {
                Assert.True(keys.MoveNext());
                Assert.Equal(expectedKey, keys.Current);
            }
            Assert.False(keys.MoveNext());
        }
        catch (Exception e)
        {
            _testOutputHelper.WriteLine(e.ToString());
        }

        // valid for namespace2
        try
        {
            using var keys = backend.GetKeys(fieldName, ns2).OrderBy(it => it).GetEnumerator();
            for (var expectedKey = 0; expectedKey < namespace2ElementsNum; expectedKey++)
            {
                Assert.True(keys.MoveNext());
                Assert.Equal(expectedKey, keys.Current);
            }
            Assert.False(keys.MoveNext());
        }
        catch (Exception e)
        {
            _testOutputHelper.WriteLine(e.ToString());
        }
    }
    finally
    {
        IOUtils.CloseQuietly(backend);
        backend.Dispose();
    }
}
/// <exception cref="System.Exception"/>
private void InitKDCServer()
{
    string orgName = conf.GetProperty(OrgName);
    string orgDomain = conf.GetProperty(OrgDomain);
    string bindAddress = conf.GetProperty(KdcBindAddress);
    IDictionary<string, string> map = new Dictionary<string, string>();
    map["0"] = orgName.ToLower(Extensions.GetEnglishCulture());
    map["1"] = orgDomain.ToLower(Extensions.GetEnglishCulture());
    map["2"] = orgName.ToUpper(Extensions.GetEnglishCulture());
    map["3"] = orgDomain.ToUpper(Extensions.GetEnglishCulture());
    map["4"] = bindAddress;
    ClassLoader cl = Thread.CurrentThread().GetContextClassLoader();
    InputStream is1 = cl.GetResourceAsStream("minikdc.ldiff");
    SchemaManager schemaManager = ds.GetSchemaManager();
    LdifReader reader = null;
    try
    {
        string content = StrSubstitutor.Replace(IOUtils.ToString(is1), map);
        reader = new LdifReader(new StringReader(content));
        foreach (LdifEntry ldifEntry in reader)
        {
            ds.GetAdminSession().Add(new DefaultEntry(schemaManager, ldifEntry.GetEntry()));
        }
    }
    finally
    {
        IOUtils.CloseQuietly(reader);
        IOUtils.CloseQuietly(is1);
    }
    KerberosConfig kerberosConfig = new KerberosConfig();
    kerberosConfig.SetMaximumRenewableLifetime(long.Parse(conf.GetProperty(MaxRenewableLifetime)));
    kerberosConfig.SetMaximumTicketLifetime(long.Parse(conf.GetProperty(MaxTicketLifetime)));
    kerberosConfig.SetSearchBaseDn(string.Format("dc={0},dc={1}", orgName, orgDomain));
    kerberosConfig.SetPaEncTimestampRequired(false);
    //kdc = new KdcServer(kerberosConfig);
    kdc = new KdcServer();
    kdc.SetDirectoryService(ds);
    // transport
    string transport = conf.GetProperty(Transport);
    if (transport.Trim().Equals("TCP"))
    {
        kdc.AddTransports(new TcpTransport(bindAddress, port, 3, 50));
    }
    else
    {
        if (transport.Trim().Equals("UDP"))
        {
            kdc.AddTransports(new UdpTransport(port));
        }
        else
        {
            throw new ArgumentException("Invalid transport: " + transport);
        }
    }
    kdc.SetServiceName(conf.GetProperty(Instance));
    kdc.Start();
    StringBuilder sb = new StringBuilder();
    InputStream is2 = cl.GetResourceAsStream("minikdc-krb5.conf");
    BufferedReader r = null;
    try
    {
        r = new BufferedReader(new InputStreamReader(is2, Charsets.Utf8));
        string line = r.ReadLine();
        while (line != null)
        {
            sb.Append(line).Append("{3}");
            line = r.ReadLine();
        }
    }
    finally
    {
        IOUtils.CloseQuietly(r);
        IOUtils.CloseQuietly(is2);
    }
    krb5conf = new FilePath(workDir, "krb5.conf").GetAbsoluteFile();
    FileUtils.WriteStringToFile(krb5conf, MessageFormat.Format(sb.ToString(), GetRealm(),
        GetHost(), Extensions.ToString(GetPort()), Runtime.GetProperty("line.separator")));
    Runtime.SetProperty(JavaSecurityKrb5Conf, krb5conf.GetAbsolutePath());
    Runtime.SetProperty(SunSecurityKrb5Debug, conf.GetProperty(Debug, "false"));
    // refresh the config
    Type classRef;
    if (Runtime.GetProperty("java.vendor").Contains("IBM"))
    {
        classRef = Runtime.GetType("com.ibm.security.krb5.internal.Config");
    }
    else
    {
        classRef = Runtime.GetType("sun.security.krb5.Config");
    }
    MethodInfo refreshMethod = classRef.GetMethod("refresh", new Type[0]);
    refreshMethod.Invoke(classRef, new object[0]);
    Log.Info("MiniKdc listening at port: {}", GetPort());
    Log.Info("MiniKdc setting JVM krb5.conf to: {}", krb5conf.GetAbsolutePath());
}
public override void Shutdown()
{
    IOUtils.CloseQuietly(_eventBusRegistration);
    _eventBusRegistration = null;
}
public virtual void Run()
{
    bool success = false;
    FileInputStream blockIn = null;
    FileInputStream metaIn = null;
    MappableBlock mappableBlock = null;
    ExtendedBlock extBlk = new ExtendedBlock(this.key.GetBlockPoolId(), this.key.GetBlockId(),
                                             this.length, this.genstamp);
    long newUsedBytes = this._enclosing.usedBytesCount.Reserve(this.length);
    bool reservedBytes = false;
    try
    {
        if (newUsedBytes < 0)
        {
            FsDatasetCache.Log.Warn("Failed to cache " + this.key + ": could not reserve " +
                this.length + " more bytes in the cache: " +
                DFSConfigKeys.DfsDatanodeMaxLockedMemoryKey + " of " +
                this._enclosing.maxBytes + " exceeded.");
            return;
        }
        reservedBytes = true;
        try
        {
            blockIn = (FileInputStream)this._enclosing.dataset.GetBlockInputStream(extBlk, 0);
            metaIn = DatanodeUtil.GetMetaDataInputStream(extBlk, this._enclosing.dataset);
        }
        catch (InvalidCastException e)
        {
            FsDatasetCache.Log.Warn("Failed to cache " + this.key +
                ": Underlying blocks are not backed by files.", e);
            return;
        }
        catch (FileNotFoundException)
        {
            FsDatasetCache.Log.Info("Failed to cache " + this.key + ": failed to find backing files.");
            return;
        }
        catch (IOException e)
        {
            FsDatasetCache.Log.Warn("Failed to cache " + this.key + ": failed to open file", e);
            return;
        }
        try
        {
            mappableBlock = MappableBlock.Load(this.length, blockIn, metaIn, this.blockFileName);
        }
        catch (ChecksumException)
        {
            // Exception message is bogus since this wasn't caused by a file read
            FsDatasetCache.Log.Warn("Failed to cache " + this.key + ": checksum verification failed.");
            return;
        }
        catch (IOException e)
        {
            FsDatasetCache.Log.Warn("Failed to cache " + this.key, e);
            return;
        }
        lock (this._enclosing)
        {
            FsDatasetCache.Value value = this._enclosing.mappableBlockMap[this.key];
            Preconditions.CheckNotNull(value);
            Preconditions.CheckState(value.state == FsDatasetCache.State.Caching ||
                                     value.state == FsDatasetCache.State.CachingCancelled);
            if (value.state == FsDatasetCache.State.CachingCancelled)
            {
                Sharpen.Collections.Remove(this._enclosing.mappableBlockMap, this.key);
                FsDatasetCache.Log.Warn("Caching of " + this.key + " was cancelled.");
                return;
            }
            this._enclosing.mappableBlockMap[this.key] =
                new FsDatasetCache.Value(mappableBlock, FsDatasetCache.State.Cached);
        }
        FsDatasetCache.Log.Debug("Successfully cached {}. We are now caching {} bytes in total.",
                                 this.key, newUsedBytes);
        this._enclosing.dataset.datanode.GetShortCircuitRegistry().ProcessBlockMlockEvent(this.key);
        this._enclosing.numBlocksCached.AddAndGet(1);
        this._enclosing.dataset.datanode.GetMetrics().IncrBlocksCached(1);
        success = true;
    }
    finally
    {
        IOUtils.CloseQuietly(blockIn);
        IOUtils.CloseQuietly(metaIn);
        if (!success)
        {
            if (reservedBytes)
            {
                this._enclosing.usedBytesCount.Release(this.length);
            }
            FsDatasetCache.Log.Debug("Caching of {} was aborted. We are now caching only {} bytes in total.",
                                     this.key, this._enclosing.usedBytesCount.Get());
            if (mappableBlock != null)
            {
                mappableBlock.Close();
            }
            this._enclosing.numBlocksFailedToCache.IncrementAndGet();
            lock (this._enclosing)
            {
                Sharpen.Collections.Remove(this._enclosing.mappableBlockMap, this.key);
            }
        }
    }
}