/// <summary>
/// T4-generated method (ServiceWorkerConfigurationTemplate.tt): emits the
/// optional "cacheName: '...'," line of the generated service-worker config.
/// The #line pragmas map compiler diagnostics back to the .tt template source;
/// do not hand-edit — regenerate from the template instead.
/// </summary>
protected void GenerateCacheName(CachingStrategy cachingStrategy)
{
    // Emit the property only when a non-blank cache name was configured.
    if (!string.IsNullOrWhiteSpace(cachingStrategy.CacheName))
    {

#line default
#line hidden
#line 123 "C:\Users\pkost\Documents\Work\Chat\Chat\DotVVM.PWA\Templates\ServiceWorker\ServiceWorkerConfigurationTemplate.tt"
        this.Write(" cacheName: \'");

#line default
#line hidden
#line 124 "C:\Users\pkost\Documents\Work\Chat\Chat\DotVVM.PWA\Templates\ServiceWorker\ServiceWorkerConfigurationTemplate.tt"
        this.Write(this.ToStringHelper.ToStringWithCulture(cachingStrategy.CacheName));

#line default
#line hidden
#line 124 "C:\Users\pkost\Documents\Work\Chat\Chat\DotVVM.PWA\Templates\ServiceWorker\ServiceWorkerConfigurationTemplate.tt"
        this.Write("\',\r\n");

#line default
#line hidden
#line 125 "C:\Users\pkost\Documents\Work\Chat\Chat\DotVVM.PWA\Templates\ServiceWorker\ServiceWorkerConfigurationTemplate.tt"
    }
}
/// <summary>
/// Creates a Google Maps client and primes the in-memory indexes from the
/// supplied cache: re-caches every metadata entry that has a location,
/// deletes the ones that don't, and records which panorama images exist.
/// </summary>
public GoogleMapsClient(string apiKey, string signingKey, IJsonFactory<MetadataTypeT> metadataDecoder, IJsonFactory<GeocodingResponse> geocodingDecoder, CachingStrategy cache)
{
    this.apiKey = apiKey ?? throw new ArgumentNullException(nameof(apiKey));
    this.signingKey = signingKey ?? throw new ArgumentNullException(nameof(signingKey));
    this.metadataDecoder = metadataDecoder ?? throw new ArgumentNullException(nameof(metadataDecoder));
    this.geocodingDecoder = geocodingDecoder ?? throw new ArgumentNullException(nameof(geocodingDecoder));
    this.cache = cache ?? throw new ArgumentNullException(nameof(cache));

    // Walk every cached metadata record; entries without a location are stale
    // and get deleted, the rest are re-cached and checked for a matching image.
    foreach (var (contentRef, metadata) in cache.Get(metadataDecoder))
    {
        if (metadata.Location == null)
        {
            _ = cache.Delete(contentRef);
            continue;
        }

        _ = Cache(metadata);
        var jpegRef = metadata.Pano_ID + MediaType.Image.Jpeg;
        if (cache.IsCached(jpegRef))
        {
            knownImages.Add(metadata.Pano_ID);
        }
    }

    // Index every cached JPEG by its cache ID as well.
    foreach (var imageRef in cache.GetContentReferences(MediaType.Image.Jpeg))
    {
        knownImages.Add(imageRef.CacheID);
    }

    knownImages.Sort();
}
/// <summary>
/// Serializes an OP_WRITE_BLOCK request: builds the client header and checksum
/// protos, assembles the OpWriteBlockProto from the pipeline parameters, and
/// sends it on the output stream.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public override void WriteBlock(ExtendedBlock blk, StorageType storageType, Org.Apache.Hadoop.Security.Token.Token<BlockTokenIdentifier> blockToken, string clientName, DatanodeInfo[] targets, StorageType[] targetStorageTypes, DatanodeInfo source, BlockConstructionStage stage, int pipelineSize, long minBytesRcvd, long maxBytesRcvd, long latestGenerationStamp, DataChecksum requestedChecksum, CachingStrategy cachingStrategy, bool allowLazyPersist, bool pinning, bool[] targetPinnings)
{
    DataTransferProtos.ClientOperationHeaderProto header = DataTransferProtoUtil.BuildClientHeader(blk, clientName, blockToken);
    DataTransferProtos.ChecksumProto checksumProto = DataTransferProtoUtil.ToProto(requestedChecksum);
    // NOTE(review): PBHelper.Convert(..., 1) appears to skip the first element
    // of each per-target array — confirm against the PBHelper implementation.
    DataTransferProtos.OpWriteBlockProto.Builder proto = DataTransferProtos.OpWriteBlockProto.NewBuilder().SetHeader(header).SetStorageType(PBHelper.ConvertStorageType(storageType)).AddAllTargets(PBHelper.Convert(targets, 1)).AddAllTargetStorageTypes(PBHelper.ConvertStorageTypes(targetStorageTypes, 1)).SetStage(DataTransferProtoUtil.ToProto(stage)).SetPipelineSize(pipelineSize).SetMinBytesRcvd(minBytesRcvd).SetMaxBytesRcvd(maxBytesRcvd).SetLatestGenerationStamp(latestGenerationStamp).SetRequestedChecksum(checksumProto).SetCachingStrategy(GetCachingStrategy(cachingStrategy)).SetAllowLazyPersist(allowLazyPersist).SetPinning(pinning).AddAllTargetPinnings(PBHelper.Convert(targetPinnings, 1));
    // The source datanode is optional; only set on the proto when present.
    if (source != null)
    {
        proto.SetSource(PBHelper.ConvertDatanodeInfo(source));
    }
    Send(@out, OP.WriteBlock, ((DataTransferProtos.OpWriteBlockProto)proto.Build()));
}
/// <summary>
/// Test helper: issues a WriteBlock request for <paramref name="block"/> with
/// default storage type, a dummy block token, a single (unset) target slot,
/// and the default caching strategy. Min and max bytes received are both the
/// block's current length.
/// </summary>
/// <exception cref="System.IO.IOException"/>
internal virtual void WriteBlock(ExtendedBlock block, BlockConstructionStage stage, long newGS, DataChecksum checksum)
{
    // "cl" is the fixed client name used throughout this test.
    sender.WriteBlock(block, StorageType.Default, BlockTokenSecretManager.DummyToken, "cl", new DatanodeInfo[1], new StorageType[1], null, stage, 0, block.GetNumBytes(), block.GetNumBytes(), newGS, checksum, CachingStrategy.NewDefaultStrategy(), false, false, null);
}
/// <summary>
/// Gets content from the server and returns the raw response bytes, using a
/// default caching strategy that both reads from and writes to the cache.
/// Delegates to the overload that accepts an explicit <c>CachingStrategy</c>.
/// </summary>
/// <param name="requestedURL">The URL to request.</param>
/// <param name="requestContent">The request body bytes to send.</param>
/// <param name="requestContentType">The content type of <paramref name="requestContent"/>.</param>
/// <param name="decryptResponse">Whether the response content should be decrypted.</param>
/// <param name="allowRetry">Whether the request may be retried on failure.</param>
/// <param name="isError">Set by the called overload to indicate an error response.</param>
/// <returns>The response content as a byte array.</returns>
internal byte[] GetContent(String requestedURL, byte[] requestContent, String requestContentType, bool decryptResponse, bool allowRetry, out bool isError)
{
    // Default strategy: cached content may be returned and new content cached.
    var cachingStrategy = new CachingStrategy()
    {
        CanWriteToCache = true,
        CachedContentShouldBeReturned = true
    };
    return GetContent(requestedURL, requestContent, requestContentType, decryptResponse, allowRetry, cachingStrategy, out isError);
}
/// <summary>
/// Sets the builder's max readahead from the given caching strategy, capped
/// at int.MaxValue; falls back to the configured datanode default when the
/// strategy has no readahead set.
/// </summary>
public virtual BlockReaderLocal.Builder SetCachingStrategy(CachingStrategy cachingStrategy)
{
    // NOTE(review): auto-ported ternary — GetReadahead() is presumably a boxed
    // nullable long (Java Long); confirm its type before simplifying to '??'.
    long readahead = cachingStrategy.GetReadahead() != null ? cachingStrategy.GetReadahead() : DFSConfigKeys.DfsDatanodeReadaheadBytesDefault;
    this.maxReadahead = (int)Math.Min(int.MaxValue, readahead);
    return (this);
}
/// <summary>
/// Loads an image through a file-backed cache and, when requested, verifies
/// its dimensions and raw bytes against the reference portrait image.
/// </summary>
private static async Task<ImageData> RunFileTestAsync(string imageFileName, bool deleteFile, bool runTest)
{
    if (imageFileName is null)
    {
        throw new ArgumentNullException(nameof(imageFileName));
    }

    if (imageFileName.Length == 0)
    {
        throw new ArgumentException("path must not be empty string", nameof(imageFileName));
    }

    var picturesDir = Environment.GetFolderPath(Environment.SpecialFolder.MyPictures);
    var cachedImage = new FileInfo(Path.Combine(picturesDir, imageFileName));
    if (deleteFile && cachedImage.Exists)
    {
        cachedImage.Delete();
    }

    var cache = new CachingStrategy
    {
        new FileCacheLayer(cachedImage.Directory)
    };

    var request = new ImageRequest(
        new Uri("https://www.seanmcbeth.com"),
        "2015-05.min.jpg");

    var codec = new TranscoderCodec<BitMiracle.LibJpeg.JpegImage, ImageData>(
        new LibJpegNETCodec(80),
        new LibJpegNETImageDataTranscoder());

    var result = await cache
        .LoadAsync(codec, request)
        .ConfigureAwait(false);

    if (runTest)
    {
        // Compare against the on-disk reference image, byte for byte.
        var referencePath = Path.Combine(picturesDir, "portrait-expected.jpg");
        using var referenceStream = FileDataSource.Instance.GetStream(referencePath);
        var reference = codec.Deserialize(referenceStream);

        var referenceData = reference.GetData();
        var resultData = result.GetData();
        Assert.AreEqual(reference.Info.Dimensions.Width, result.Info.Dimensions.Width);
        Assert.AreEqual(reference.Info.Dimensions.Height, result.Info.Dimensions.Height);
        Assert.AreEqual(referenceData.Length, resultData.Length);
        for (var i = 0; i < referenceData.Length; ++i)
        {
            Assert.AreEqual(referenceData[i], resultData[i]);
        }
    }

    return result;
}
/// <summary>
/// Demo entry point: synthesizes speech for the supplied text (or a default
/// pangram) via Azure TTS, using a file-backed cache for responses.
/// </summary>
public static async Task Main(string[] args)
{
    var text = "The quick brown fox jumps over the lazy dog.";
    if (args?.Length > 1)
    {
        text = args[1];
    }

    // Credentials file: subscription key / region / resource name, one per line.
    var profileDir = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
    var keyFilePath = Path.Combine(profileDir, "Projects", "DevKeys", "azure-speech.txt");
    var keyLines = File.ReadAllLines(keyFilePath);
    var subscriptionKey = keyLines[0];
    var region = keyLines[1];
    var resourceName = keyLines[2];

    // File-backed response cache.
    var cacheRoot = new DirectoryInfo(Path.Combine(profileDir, "Projects"));
    var cache = new CachingStrategy
    {
        new FileCacheLayer(cacheRoot)
    };

    var voiceListDecoder = new JsonFactory<Voice[]>();
    var audioDecoder = new NAudioAudioDataDecoder();
    var ttsClient = new TextToSpeechClient(
        region,
        subscriptionKey,
        resourceName,
        voiceListDecoder,
        audioDecoder,
        cache);

    var voices = await ttsClient
        .GetVoicesAsync()
        .ConfigureAwait(false);
    var voice = Array.Find(voices, v => v.Locale == "en-US" && v.Gender == "Female");

    try
    {
        //await DecodeAudio(text, audioDecoder, ttsClient, voice);
        await PlayAudioAsync(text, audioDecoder, ttsClient, voice).ConfigureAwait(false);
        Console.WriteLine("Success!");
    }
    catch (Exception exp)
    {
        Console.WriteLine(exp.Message);
    }
}
/// <summary>
/// Serializes an OP_READ_BLOCK request for <paramref name="length"/> bytes of
/// <paramref name="blk"/> starting at <paramref name="blockOffset"/>, and
/// sends it on the output stream.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public override void ReadBlock(ExtendedBlock blk, Org.Apache.Hadoop.Security.Token.Token<BlockTokenIdentifier> blockToken, string clientName, long blockOffset, long length, bool sendChecksum, CachingStrategy cachingStrategy)
{
    // Build the request proto: client header (block + token), read range,
    // checksum flag, and the wire form of the caching strategy.
    DataTransferProtos.OpReadBlockProto proto = ((DataTransferProtos.OpReadBlockProto)DataTransferProtos.OpReadBlockProto.NewBuilder().SetHeader(DataTransferProtoUtil.BuildClientHeader(blk, clientName, blockToken)).SetOffset(blockOffset).SetLen(length).SetSendChecksums(sendChecksum).SetCachingStrategy(GetCachingStrategy(cachingStrategy)).Build());
    Send(@out, OP.ReadBlock, proto);
}
/// <summary>
/// The client's cached-metadata enumeration should yield at least one entry
/// when backed by the streaming-assets file cache.
/// </summary>
public void GetAllMetadata()
{
    var fileCache = new CachingStrategy
    {
        new FileCacheLayer(GmapsStreamingAssetsDir)
    };

    var client = new GoogleMapsClient(apiKey, signingKey, metadataDecoder, geocodingDecoder, fileCache);
    var cachedFiles = client.CachedMetadata.ToArray();
    Assert.AreNotEqual(0, cachedFiles.Length);
}
/// <summary>
/// Swaps in a new cache and codec, then re-initializes every photosphere
/// already present in the hierarchy (deactivating each one first).
/// </summary>
public void SetIO(CachingStrategy cache, IImageCodec<Texture2D> codec)
{
    this.cache = cache;
    this.codec = codec;

    foreach (var photosphere in GetComponentsInChildren<Photosphere>())
    {
        photosphere.Deactivate();
        Initialize(photosphere);
    }
}
// try reading a block using a BlockReader directly
/// <summary>
/// Attempts to read a block with a directly-constructed BlockReader and
/// asserts that the attempt succeeds, or fails with an
/// InvalidBlockTokenException, as <paramref name="shouldSucceed"/> dictates.
/// </summary>
private static void TryRead(Configuration conf, LocatedBlock lblock, bool shouldSucceed)
{
    IPEndPoint targetAddr = null;
    IOException ioe = null;
    BlockReader blockReader = null;
    ExtendedBlock block = lblock.GetBlock();
    try
    {
        DatanodeInfo[] nodes = lblock.GetLocations();
        targetAddr = NetUtils.CreateSocketAddr(nodes[0].GetXferAddr());
        // Build a reader for the whole block (offset 0, length -1) against the
        // first located datanode, with checksum verification enabled.
        blockReader = new BlockReaderFactory(new DFSClient.Conf(conf)).SetFileName(BlockReaderFactory.GetFileName(targetAddr, "test-blockpoolid", block.GetBlockId())).SetBlock(block).SetBlockToken(lblock.GetBlockToken()).SetInetSocketAddress(targetAddr).SetStartOffset(0).SetLength(-1).SetVerifyChecksum(true).SetClientName("TestBlockTokenWithDFS").SetDatanodeInfo(nodes[0]).SetCachingStrategy(CachingStrategy.NewDefaultStrategy()).SetClientCacheContext(ClientContext.GetFromConf(conf)).SetConfiguration(conf).SetRemotePeerFactory(new _RemotePeerFactory_162(conf)).Build();
    }
    catch (IOException ex)
    {
        // Capture the failure so the assertions below can inspect it.
        ioe = ex;
    }
    finally
    {
        if (blockReader != null)
        {
            try
            {
                blockReader.Close();
            }
            catch (IOException e)
            {
                throw new RuntimeException(e);
            }
        }
    }
    if (shouldSucceed)
    {
        NUnit.Framework.Assert.IsNotNull("OP_READ_BLOCK: access token is invalid, " + "when it is expected to be valid", blockReader);
    }
    else
    {
        NUnit.Framework.Assert.IsNotNull("OP_READ_BLOCK: access token is valid, " + "when it is expected to be invalid", ioe);
        NUnit.Framework.Assert.IsTrue("OP_READ_BLOCK failed due to reasons other than access token: ", ioe is InvalidBlockTokenException);
    }
}
/// <summary>
/// Converts a client-side CachingStrategy into its wire-protocol proto form,
/// copying only the fields that are actually set.
/// </summary>
private static DataTransferProtos.CachingStrategyProto GetCachingStrategy(CachingStrategy cachingStrategy)
{
    var builder = DataTransferProtos.CachingStrategyProto.NewBuilder();

    if (cachingStrategy.GetReadahead() != null)
    {
        builder.SetReadahead(cachingStrategy.GetReadahead());
    }

    if (cachingStrategy.GetDropBehind() != null)
    {
        builder.SetDropBehind(cachingStrategy.GetDropBehind());
    }

    return (DataTransferProtos.CachingStrategyProto)builder.Build();
}
/// <summary>
/// A metadata record loaded directly by its content reference should match
/// the record reported through the client's CachedMetadata enumeration.
/// </summary>
public async Task GetMetadataByFileRefAsync()
{
    var fileCache = new CachingStrategy
    {
        new FileCacheLayer(GmapsStreamingAssetsDir)
    };

    var client = new GoogleMapsClient(apiKey, signingKey, metadataDecoder, geocodingDecoder, fileCache);
    var expected = client.CachedMetadata.FirstOrDefault();

    var fileRef = new ContentReference(expected.Pano_id, MediaType.Application_Json);
    var reloaded = await fileCache.LoadAsync(metadataDecoder, fileRef)
        .ConfigureAwait(false);

    Assert.AreEqual(expected.Pano_id, reloaded.Pano_id);
}
/// <summary>
/// Reads the Google Street View API and URL-signing keys from the dev-keys
/// file and sets up a file-backed cache under the test root.
/// </summary>
public virtual void Init()
{
    var keyFilePath = Path.Combine(CachingTests.TestRootDir, "DevKeys", "google-streetview.txt");
    var keyLines = File.ReadAllLines(keyFilePath);
    apiKey = keyLines[0];
    signingKey = keyLines[1];

    var cacheRoot = new DirectoryInfo(Path.Combine(CachingTests.TestRootDir, "GoogleMaps"));
    cache = new CachingStrategy
    {
        new FileCacheLayer(cacheRoot)
    };
}
/// <summary>
/// Reads the Azure Speech credentials from the shared assets folder and sets
/// up a file-backed cache next to them.
/// </summary>
public void Setup()
{
    var profileDir = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
    var assetsDir = Path.Combine(profileDir, "Box", "VR Initiatives", "Engineering", "Assets");

    // Credentials file: subscription key / region / resource name, one per line.
    var keyLines = File.ReadAllLines(Path.Combine(assetsDir, "DevKeys", "azure-speech.txt"));
    subscriptionKey = keyLines[0];
    region = keyLines[1];
    resourceName = keyLines[2];

    var azureCacheDir = new DirectoryInfo(Path.Combine(assetsDir, "Azure"));
    cache = new CachingStrategy
    {
        new FileCacheLayer(azureCacheDir)
    };
}
/// <summary>Get a BlockReader for the given block.</summary>
/// <exception cref="System.IO.IOException"/>
public static BlockReader GetBlockReader(MiniDFSCluster cluster, LocatedBlock testBlock, int offset, int lenToRead)
{
    IPEndPoint targetAddr = null;
    ExtendedBlock block = testBlock.GetBlock();
    DatanodeInfo[] nodes = testBlock.GetLocations();
    // Always read from the first located datanode.
    targetAddr = NetUtils.CreateSocketAddr(nodes[0].GetXferAddr());
    DistributedFileSystem fs = cluster.GetFileSystem();
    // Reader covers [offset, offset + lenToRead) with checksum verification
    // and short-circuit local reads enabled.
    return (new BlockReaderFactory(fs.GetClient().GetConf()).SetInetSocketAddress(targetAddr).SetBlock(block).SetFileName(targetAddr.ToString() + ":" + block.GetBlockId()).SetBlockToken(testBlock.GetBlockToken()).SetStartOffset(offset).SetLength(lenToRead).SetVerifyChecksum(true).SetClientName("BlockReaderTestUtil").SetDatanodeInfo(nodes[0]).SetClientCacheContext(ClientContext.GetFromConf(fs.GetConf())).SetCachingStrategy(CachingStrategy.NewDefaultStrategy()).SetConfiguration(fs.GetConf()).SetAllowShortCircuitLocalReads(true).SetRemotePeerFactory(new _RemotePeerFactory_196(fs)).Build());
}
/// <summary>
/// Builds a stream resolver over the store: snapshotting is enabled when
/// snapshot callbacks are supplied, and a 20-minute sliding-window cache is
/// used when a cache instance was configured.
/// </summary>
public override Func<Target, Equinox.Core.IStream<TEvent, TState>> Resolve<TEvent, TState>(
    FsCodec.IUnionEncoder<TEvent, byte[], object> codec,
    Func<TState, IEnumerable<TEvent>, TState> fold,
    TState initial,
    Func<TEvent, bool> isOrigin = null,
    Func<TState, TEvent> compact = null)
{
    var useSnapshots = isOrigin != null || compact != null;
    var accessStrategy = useSnapshots
        ? AccessStrategy<TEvent, TState>.NewSnapshot(FuncConvert.FromFunc(isOrigin), FuncConvert.FromFunc(compact))
        : null;
    var cacheStrategy = _cache != null
        ? CachingStrategy.NewSlidingWindow(_cache, TimeSpan.FromMinutes(20))
        : null;
    var resolver = new Resolver<TEvent, TState, object>(_store, codec, FuncConvert.FromFunc(fold), initial, cacheStrategy, accessStrategy);
    return t => resolver.Resolve(t);
}
/// <summary>
/// Builds a stream resolver over the EventStore gateway: rolling snapshots
/// when snapshot callbacks are supplied, and a 20-minute sliding-window cache
/// when a cache instance was configured.
/// </summary>
public override Func<Target, IStream<TEvent, TState>> Resolve<TEvent, TState>(
    IUnionEncoder<TEvent, byte[]> codec,
    Func<TState, IEnumerable<TEvent>, TState> fold,
    TState initial,
    Func<TEvent, bool> isOrigin = null,
    Func<TState, TEvent> compact = null)
{
    // Snapshotting only when the caller supplied either callback.
    var accessStrategy = isOrigin == null && compact == null ? null : AccessStrategy<TEvent, TState>.NewRollingSnapshots(FuncConvert.FromFunc(isOrigin), FuncConvert.FromFunc(compact));
    var cacheStrategy = _cache == null ? null : CachingStrategy.NewSlidingWindow(_cache, TimeSpan.FromMinutes(20));
    var resolver = new GesResolver<TEvent, TState>(_gateway, codec, FuncConvert.FromFunc(fold), initial, accessStrategy, cacheStrategy);
    return (t => resolver.Resolve.Invoke(t));
}
/// <summary>
/// Builds a stream resolver via an EventStoreCategory: rolling snapshots when
/// snapshot callbacks are supplied, and a 20-minute sliding-window cache when
/// a cache instance was configured.
/// </summary>
public override Func<string, IStream<TEvent, TState>> Resolve<TEvent, TState>(
    FsCodec.IEventCodec<TEvent, byte[], object> codec,
    Func<TState, IEnumerable<TEvent>, TState> fold,
    TState initial,
    Func<TEvent, bool> isOrigin = null,
    Func<TState, TEvent> toSnapshot = null)
{
    // Snapshotting only when the caller supplied either callback.
    var accessStrategy = isOrigin == null && toSnapshot == null ? null : AccessStrategy<TEvent, TState>.NewRollingSnapshots(FuncConvert.FromFunc(isOrigin), FuncConvert.FromFunc(toSnapshot));
    var cacheStrategy = _cache == null ? null : CachingStrategy.NewSlidingWindow(_cache, TimeSpan.FromMinutes(20));
    var cat = new EventStoreCategory<TEvent, TState, object>(_connection, codec, FuncConvert.FromFunc(fold), initial, cacheStrategy, accessStrategy);
    return (t => cat.Resolve(t));
}
/// <summary>
/// Before populating a target object, generates property mappings or loads
/// them from the configured caching strategy.
/// </summary>
/// <param name="target">The object about to be populated.</param>
/// <param name="source">The source object the mappings are derived from.</param>
protected virtual void OnBeforePopulate(TTarget target, TSource source)
{
    if (CachingStrategy is NullCachingStrategy && !MappingStrategy.GeneratedMappings)
    {
        // No cache in play and mappings not yet generated: generate directly.
        MappingStrategy.GetMappings(source, typeof(TTarget), MatchingStrategy, AutoMapUnspecifiedTargets);
    }
    else
    {
        var cacheKey = GetCacheKey();
        // Pattern match replaces the old 'as' cast + separate null check.
        if (CachingStrategy.Get(cacheKey) is List<IPropertyMapping> cachedMappings)
        {
            // Cache hit: reuse the previously generated mappings.
            MappingStrategy.SetMappings(cachedMappings);
        }
        else
        {
            // Cache miss: generate and store for next time.
            var mappings = MappingStrategy.GetMappings(source, typeof(TTarget), MatchingStrategy, AutoMapUnspecifiedTargets);
            CachingStrategy.Set(cacheKey, mappings);
        }
    }
}
/// <summary>
/// WinForms entry point: wires up the Google Maps client against a
/// file-backed cache and runs the image viewer form.
/// </summary>
private static void Main()
{
    Application.EnableVisualStyles();
    Application.SetCompatibleTextRenderingDefault(false);
    Application.ThreadException += Application_ThreadException;

    var metadataDecoder = new JsonFactory<MetadataResponse>();
    var geocodingDecoder = new JsonFactory<GeocodingResponse>();

    // Shared assets folder under the user's profile.
    var profileDir = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
    var assetsDir = Path.Combine(profileDir, "Box", "VR Initiatives", "Engineering", "Assets");
    var keyFilePath = Path.Combine(assetsDir, "DevKeys", "google-streetview.txt");

    var mapsCacheDir = new DirectoryInfo(Path.Combine(assetsDir, "GoogleMaps"));
    var cache = new CachingStrategy
    {
        new FileCacheLayer(mapsCacheDir)
    };

    // Key file holds the API key on line 0 and the signing key on line 1.
    var keyLines = File.ReadAllLines(keyFilePath);
    gmaps = new GoogleMapsClient(
        keyLines[0],
        keyLines[1],
        metadataDecoder,
        geocodingDecoder,
        cache);
    imageDecoder = new GDICodec(MediaType.Image_Jpeg);

    form = new ImageViewer();
    form.LocationSubmitted += Form_LocationSubmitted;
    form.LatLngSubmitted += Form_LatLngSubmitted;
    form.PanoSubmitted += Form_PanoSubmitted;
    using (form)
    {
        Application.Run(form);
    }
}
/// <summary>
/// Validates the Azure region, subscription key and voice-list decoder, then
/// stores them; a fresh default CachingStrategy is used when none is given.
/// </summary>
public VoicesClient(string azureRegion, string azureSubscriptionKey, IJsonDecoder<Voice[]> voiceListDecoder, CachingStrategy cache)
{
    if (string.IsNullOrEmpty(azureRegion))
    {
        throw new ArgumentException("Must provide a service region", nameof(azureRegion));
    }

    if (string.IsNullOrEmpty(azureSubscriptionKey))
    {
        throw new ArgumentException("Must provide a subscription key", nameof(azureSubscriptionKey));
    }

    // The decoder must exist and must consume JSON input.
    if (voiceListDecoder is null || voiceListDecoder.InputContentType != MediaType.Application_Json)
    {
        throw new ArgumentException("Must provide a JSON deserializer for the voice list data", nameof(voiceListDecoder));
    }

    AzureRegion = azureRegion;
    this.azureSubscriptionKey = azureSubscriptionKey;
    this.voiceListDecoder = voiceListDecoder;
    Cache = cache ?? new CachingStrategy();
}
/// <summary>
/// Creates a memory cache driven by the given caching strategy.
/// </summary>
/// <param name="cachingStrategy">The strategy controlling item caching; must not be null.</param>
/// <exception cref="System.ArgumentNullException">When <paramref name="cachingStrategy"/> is null.</exception>
public MemoryCache(CachingStrategy<TItem> cachingStrategy)
{
    // Guard added for consistency with the other constructors in this codebase;
    // a null strategy would otherwise surface later as a NullReferenceException.
    _cachingStrategy = cachingStrategy ?? throw new System.ArgumentNullException(nameof(cachingStrategy));
}
/// <summary>
/// Creates a TTS client; delegates shared setup to the base class and
/// validates that an audio decoder was supplied.
/// </summary>
public TextToSpeechClient(string azureRegion, string azureSubscriptionKey, string azureResourceName, IJsonDecoder<Voice[]> voiceListDecoder, AudioFormat outputFormat, IAudioDecoder audioDecoder, CachingStrategy cache)
    : base(azureRegion, azureSubscriptionKey, azureResourceName, voiceListDecoder, outputFormat, cache)
{
    if (audioDecoder is null)
    {
        throw new ArgumentException("Must provide an audio decoder", nameof(audioDecoder));
    }

    this.audioDecoder = audioDecoder;
    CheckDecoderFormat();
}
/// <summary>
/// Unity lifecycle: wires up scene dependencies, builds the caching strategy
/// (streaming-assets layer, plus editor-only file backups), and constructs
/// the Google Maps client and photosphere IO.
/// </summary>
public override void Awake()
{
    base.Awake();
    processor = new UnityTexture2DProcessor();
    Find.Any(out loadingBar);
    Find.Any(out gps);
    if (!this.FindClosest(out photospheres))
    {
        photospheres = this.Ensure<PhotosphereManager>();
    }
    Find.Any(out avatar);
    navPlane = avatar.GroundPlane.Ensure<Clickable>();
    navPlane.Activate();
    navPointer = transform.Find("NavPointer");
    if (navPointer != null)
    {
        navPointer.Deactivate();
    }
    Find.Any(out input);
    cache = new CachingStrategy();
#if UNITY_EDITOR
    this.ReceiveCredentials();
    // Editor-only: allow typing a location directly; falls back to GPS.
    locationInput = this.Ensure<EditorTextInput>();
    locationInput.OnSubmit.AddListener(new UnityAction<string>(SetLocation));
    if (!string.IsNullOrEmpty(locationInput.value))
    {
        SetLocation(locationInput.value);
    }
    else if (gps != null && gps.HasCoord)
    {
        SetLocation(gps.Coord.ToString());
    }
    // Editor-only backup layers pointing at legacy cache folders under the
    // user's Box assets directory (presumably read-through — confirm AddBackup semantics).
    var userProfile = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
    var assetsRoot = Path.Combine(userProfile, "Box", "VR Initiatives", "Engineering", "Assets");
    var oldCubemapPath = Path.Combine(assetsRoot, "GoogleMaps");
    var oldGmapsPath = Path.Combine(oldCubemapPath, "streetview", "maps", "api");
    cache.AddBackup(new FileCacheLayer(oldCubemapPath));
    cache.AddBackup(new FileCacheLayer(oldGmapsPath));
#else
    if (gps != null && gps.HasCoord)
    {
        SetLocation(gps.Coord.ToString());
    }
#endif
    // Primary cache layer lives under streaming assets.
    var newGmapsPath = Path.Combine(CachePrefix, "Google", "StreetView");
    cache.Add(new StreamingAssetsCacheLayer(newGmapsPath));
    codec = new UnityTexture2DCodec(MediaType.Image.Jpeg);
    var metadataDecoder = new JsonFactory<MetadataTypeT>();
    var geocodingDecoder = new JsonFactory<GeocodingResponse>();
    gmaps = new GoogleMapsClient<MetadataTypeT>(gmapsApiKey, gmapsSigningKey, metadataDecoder, geocodingDecoder, cache);
    photospheres.CubemapNeeded += Photosphere_CubemapNeeded;
    photospheres.SetIO(cache, codec);
    photospheres.SetDetailLevels(searchFOVs);
}
/// <summary>
/// Creates a streaming client with the given Google Maps credentials and
/// cache; every argument is required.
/// </summary>
public GoogleMapsStreamingClient(string apiKey, string signingKey, CachingStrategy cache)
{
    if (apiKey is null)
    {
        throw new ArgumentNullException(nameof(apiKey));
    }

    if (signingKey is null)
    {
        throw new ArgumentNullException(nameof(signingKey));
    }

    if (cache is null)
    {
        throw new ArgumentNullException(nameof(cache));
    }

    ApiKey = apiKey;
    SigningKey = signingKey;
    Cache = cache;
}
/// <summary>
/// Convenience overload: delegates to the main constructor using the
/// temporary cache from <see cref="CachingStrategy.GetTempCache"/>.
/// </summary>
public GoogleMapsStreamingClient(string apiKey, string signingKey)
    : this(apiKey, signingKey, CachingStrategy.GetTempCache())
{
}
/// <summary>Clears all cached entries via the caching strategy.</summary>
public void ClearCache() => CachingStrategy.ClearAll();
/// <summary>
/// End-to-end exercise of the HDFS data transfer protocol against a
/// single-datanode mini-cluster: bad protocol version, bad op code, several
/// malformed OP_WRITE_BLOCK packets, then a series of OP_READ_BLOCK boundary
/// cases, finishing with a successful full read of the file.
/// </summary>
public virtual void TestDataTransferProtocol()
{
    Random random = new Random();
    int oneMil = 1024 * 1024;
    Path file = new Path("dataprotocol.dat");
    int numDataNodes = 1;
    Configuration conf = new HdfsConfiguration();
    conf.SetInt(DFSConfigKeys.DfsReplicationKey, numDataNodes);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).NumDataNodes(numDataNodes).Build();
    try
    {
        cluster.WaitActive();
        datanode = cluster.GetFileSystem().GetDataNodeStats(HdfsConstants.DatanodeReportType.Live)[0];
        dnAddr = NetUtils.CreateSocketAddr(datanode.GetXferAddr());
        FileSystem fileSys = cluster.GetFileSystem();
        int fileLen = Math.Min(conf.GetInt(DFSConfigKeys.DfsBlockSizeKey, 4096), 4096);
        CreateFile(fileSys, file, fileLen);
        // get the first blockid for the file
        ExtendedBlock firstBlock = DFSTestUtil.GetFirstBlock(fileSys, file);
        string poolId = firstBlock.GetBlockPoolId();
        long newBlockId = firstBlock.GetBlockId() + 1;
        recvBuf.Reset();
        sendBuf.Reset();
        // bad version
        recvOut.WriteShort((short)(DataTransferProtocol.DataTransferVersion - 1));
        sendOut.WriteShort((short)(DataTransferProtocol.DataTransferVersion - 1));
        SendRecvData("Wrong Version", true);
        // bad ops
        sendBuf.Reset();
        sendOut.WriteShort((short)DataTransferProtocol.DataTransferVersion);
        sendOut.WriteByte(OP.WriteBlock.code - 1);
        SendRecvData("Wrong Op Code", true);
        /* Test OP_WRITE_BLOCK */
        sendBuf.Reset();
        // a spied checksum whose bytes-per-checksum is forced to -1
        DataChecksum badChecksum = Org.Mockito.Mockito.Spy(DefaultChecksum);
        Org.Mockito.Mockito.DoReturn(-1).When(badChecksum).GetBytesPerChecksum();
        WriteBlock(poolId, newBlockId, badChecksum);
        recvBuf.Reset();
        SendResponse(DataTransferProtos.Status.Error, null, null, recvOut);
        SendRecvData("wrong bytesPerChecksum while writing", true);
        sendBuf.Reset();
        recvBuf.Reset();
        WriteBlock(poolId, ++newBlockId, DefaultChecksum);
        // packet header args: (size of packet, offset in block, seqno,
        // last packet, datalen — deliberately negative here, sync)
        PacketHeader hdr = new PacketHeader(4, 0, 100, false, -1 - random.Next(oneMil), false);
        // bad datalen
        hdr.Write(sendOut);
        SendResponse(DataTransferProtos.Status.Success, string.Empty, null, recvOut);
        new PipelineAck(100, new int[] { PipelineAck.CombineHeader(PipelineAck.ECN.Disabled, DataTransferProtos.Status.Error) }).Write(recvOut);
        SendRecvData("negative DATA_CHUNK len while writing block " + newBlockId, true);
        // test for writing a valid zero size block
        sendBuf.Reset();
        recvBuf.Reset();
        WriteBlock(poolId, ++newBlockId, DefaultChecksum);
        // packet header args: (size of packet, OffsetInBlock, sequencenumber,
        // lastPacketInBlock, chunk length, sync)
        hdr = new PacketHeader(8, 0, 100, true, 0, false);
        hdr.Write(sendOut);
        // zero checksum
        sendOut.WriteInt(0);
        sendOut.Flush();
        //ok finally write a block with 0 len
        SendResponse(DataTransferProtos.Status.Success, string.Empty, null, recvOut);
        new PipelineAck(100, new int[] { PipelineAck.CombineHeader(PipelineAck.ECN.Disabled, DataTransferProtos.Status.Success) }).Write(recvOut);
        SendRecvData("Writing a zero len block blockid " + newBlockId, false);
        /* Test OP_READ_BLOCK */
        string bpid = cluster.GetNamesystem().GetBlockPoolId();
        ExtendedBlock blk = new ExtendedBlock(bpid, firstBlock.GetLocalBlock());
        long blkid = blk.GetBlockId();
        // bad block id
        sendBuf.Reset();
        recvBuf.Reset();
        blk.SetBlockId(blkid - 1);
        sender.ReadBlock(blk, BlockTokenSecretManager.DummyToken, "cl", 0L, fileLen, true, CachingStrategy.NewDefaultStrategy());
        SendRecvData("Wrong block ID " + newBlockId + " for read", false);
        // negative block start offset -1L
        sendBuf.Reset();
        blk.SetBlockId(blkid);
        sender.ReadBlock(blk, BlockTokenSecretManager.DummyToken, "cl", -1L, fileLen, true, CachingStrategy.NewDefaultStrategy());
        SendRecvData("Negative start-offset for read for block " + firstBlock.GetBlockId(), false);
        // bad block start offset
        sendBuf.Reset();
        sender.ReadBlock(blk, BlockTokenSecretManager.DummyToken, "cl", fileLen, fileLen, true, CachingStrategy.NewDefaultStrategy());
        SendRecvData("Wrong start-offset for reading block " + firstBlock.GetBlockId(), false);
        // negative length is ok. Datanode assumes we want to read the whole block.
        recvBuf.Reset();
        ((DataTransferProtos.BlockOpResponseProto)DataTransferProtos.BlockOpResponseProto.NewBuilder().SetStatus(DataTransferProtos.Status.Success).SetReadOpChecksumInfo(DataTransferProtos.ReadOpChecksumInfoProto.NewBuilder().SetChecksum(DataTransferProtoUtil.ToProto(DefaultChecksum)).SetChunkOffset(0L)).Build()).WriteDelimitedTo(recvOut);
        sendBuf.Reset();
        sender.ReadBlock(blk, BlockTokenSecretManager.DummyToken, "cl", 0L, -1L - random.Next(oneMil), true, CachingStrategy.NewDefaultStrategy());
        SendRecvData("Negative length for reading block " + firstBlock.GetBlockId(), false);
        // length is more than size of block.
        recvBuf.Reset();
        SendResponse(DataTransferProtos.Status.Error, null, "opReadBlock " + firstBlock + " received exception java.io.IOException: " + "Offset 0 and length 4097 don't match block " + firstBlock + " ( blockLen 4096 )", recvOut);
        sendBuf.Reset();
        sender.ReadBlock(blk, BlockTokenSecretManager.DummyToken, "cl", 0L, fileLen + 1, true, CachingStrategy.NewDefaultStrategy());
        SendRecvData("Wrong length for reading block " + firstBlock.GetBlockId(), false);
        //At the end of all this, read the file to make sure that succeeds finally.
        sendBuf.Reset();
        sender.ReadBlock(blk, BlockTokenSecretManager.DummyToken, "cl", 0L, fileLen, true, CachingStrategy.NewDefaultStrategy());
        ReadFile(fileSys, file, fileLen);
    }
    finally
    {
        cluster.Shutdown();
    }
}