void MountBlobs()
{
    IBlob file_blob = null;
    BlobInfos = new List<BlobInfo>();
    foreach (var ccf in IN_CompileJob.OUT_CompiledCueFiles)
    {
        var bi = new BlobInfo();
        BlobInfos.Add(bi);

        switch (ccf.Type)
        {
            case CompiledCueFileType.BIN:
            case CompiledCueFileType.Unknown:
            {
                // Raw files
                var blob = new Disc.Blob_RawFile { PhysicalPath = ccf.FullPath };
                OUT_Disc.DisposableResources.Add(file_blob = blob);
                bi.Length = blob.Length;
                break;
            }
            case CompiledCueFileType.ECM:
            {
                var blob = new Disc.Blob_ECM();
                OUT_Disc.DisposableResources.Add(file_blob = blob);
                blob.Load(ccf.FullPath);
                bi.Length = blob.Length;
                break;
            }
            case CompiledCueFileType.WAVE:
            {
                var blob = new Disc.Blob_WaveFile();
                OUT_Disc.DisposableResources.Add(file_blob = blob);
                blob.Load(ccf.FullPath);
                bi.Length = blob.Length;
                break;
            }
            case CompiledCueFileType.DecodeAudio:
            {
                FFMpeg ffmpeg = new FFMpeg();
                if (!ffmpeg.QueryServiceAvailable())
                {
                    throw new DiscReferenceException(ccf.FullPath, "No decoding service was available (make sure ffmpeg.exe is available; even though this may be a wav, ffmpeg is used to load oddly formatted wave files).");
                }
                AudioDecoder dec = new AudioDecoder();
                byte[] buf = dec.AcquireWaveData(ccf.FullPath);
                var blob = new Disc.Blob_WaveFile();
                OUT_Disc.DisposableResources.Add(file_blob = blob);
                blob.Load(new MemoryStream(buf));
                bi.Length = buf.Length;
                break;
            }
            default:
                throw new InvalidOperationException();
        } // switch (file type)

        // Wrap all the blobs with zero padding
        bi.Blob = new Disc.Blob_ZeroPadAdapter(file_blob, bi.Length);
    }
}
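// A minimal self-contained sketch (an assumption, not the real Disc.Blob_ZeroPadAdapter
// used above) of the zero-padding technique: reads inside the wrapped data pass through,
// and reads past its end are filled with zeroes up to the declared length.
using System;

class ZeroPadBlobSketch
{
    private readonly byte[] _data;   // stands in for the wrapped IBlob
    private readonly long _length;   // the padded (declared) length

    public ZeroPadBlobSketch(byte[] data, long paddedLength)
    {
        _data = data;
        _length = paddedLength;
    }

    public int Read(long byteOffset, byte[] buffer, int offset, int count)
    {
        for (int i = 0; i < count; i++)
        {
            long src = byteOffset + i;
            if (src >= _length)
            {
                return i; // nothing past the padded end
            }
            buffer[offset + i] = src < _data.Length ? _data[src] : (byte)0; // zero padding
        }
        return count;
    }
}

static class ZeroPadDemo
{
    static void Main()
    {
        var blob = new ZeroPadBlobSketch(new byte[] { 1, 2 }, paddedLength: 4);
        var buf = new byte[4];
        blob.Read(0, buf, 0, buf.Length);
        Console.WriteLine(string.Join(",", buf)); // 1,2,0,0
    }
}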
public static FileInfo ToFileInfo(this File file, BlobInfo blobInfo)
{
    return new FileInfo
    {
        Id = file.Id,
        FileName = file.Filename,
        // EES-1637 Prefer the name field on the blob
        Name = blobInfo.Name.IsNullOrEmpty() ? file.Filename : blobInfo.Name,
        Path = file.Path(),
        Size = blobInfo.Size,
        Type = file.Type
    };
}
public Uri GetThumbEndpoint(string blobName, string label, Instructions instructions = null)
{
    var blobInfo = BlobInfo.FromName(blobName);
    if (blobInfo == null || instructions == null)
    {
        return null;
    }

    var uri = $"/{ImageResizerInfra.VirtualFileSystemPrefix}/{Sanitize(blobInfo.GetThumbnailName())}{instructions.ToQueryString()}";
    return new Uri(uri, UriKind.Relative);
}
private static Mock<IBlobStorageProvider> GetBlobStorageProviderMoq()
{
    var blobStorageProviderMoq = new Mock<IBlobStorageProvider>();
    var blobInfo = new BlobInfo
    {
        Size = (int)ModuleConstants.Settings.General.ImportFileMaxSize.DefaultValue * ModuleConstants.MByte
    };

    blobStorageProviderMoq.Setup(x => x.GetBlobInfoAsync(It.IsAny<string>()))
        .Returns(Task.FromResult(blobInfo));

    return blobStorageProviderMoq;
}
public static async Task<List<string>> RunOrchestrator(
    [OrchestrationTrigger] IDurableOrchestrationContext context,
    [Blob("test1/random-personal-info1.json", FileAccess.Read, Connection = "AzureWebJobsStorage")] Stream InStream,
    ILogger log)
{
    var outputs = new List<string>();
    BlobInfo info = context.GetInput<BlobInfo>();

    // A result of true only indicates completion, not necessarily success
    bool test = await context.CallActivityAsync<bool>("mainActivity", info);

    return outputs;
}
/// <summary>
/// Get an open stream for the blob contents.
/// </summary>
/// <param name="blobName">Virtual blob name.</param>
/// <returns>The open <see cref="Stream"/> instance.</returns>
public virtual Stream ReadStream(string blobName)
{
    var blobInfo = BlobInfo.FromName(blobName);
    if (blobInfo == null)
    {
        return Stream.Null;
    }

    using (var webclient = new WebClient())
    {
        return webclient.OpenRead(GetEndpoint(blobName));
    }
}
public IAsyncOperation<ItemKey> PutBlobInItem(RecordItem item, BlobInfo blobInfo, IInputStream source)
{
    item.ValidateRequired("item");
    return AsyncInfo.Run(
        async cancelToken =>
        {
            Blob blob = await UploadBlob(blobInfo, source).AsTask(cancelToken);
            item.AddOrUpdateBlob(blob);
            return await UpdateItemAsync(item).AsTask(cancelToken);
        });
}
public async Task ReadAsJsonAsync()
{
    var blobId = await PutTestBlob("a.txt", Encoding.UTF8.GetBytes("{ \"Name\": \"ActorInBlob\", \"Stage\": \"Start\" }"));
    var blobInfo = new BlobInfo(blobId, "a.txt");
    var stm = new DummyStateMachine();
    stm.Store = _store;

    var result = await blobInfo.ReadAsJsonAsync<ActorInfo>(stm);

    Assert.Equal("ActorInBlob", result.Name);
    Assert.Equal("Start", result.Stage);
}
public async Task ReadAllBytesAsync()
{
    var blobId = await PutTestBlob("a.txt", new byte[] { 1, 2, 3 });
    var blobInfo = new BlobInfo(blobId, "a.txt");
    var stm = new DummyStateMachine();
    stm.Store = _store;

    var result = await blobInfo.ReadAllBytesAsync(stm);

    Assert.Equal(
        JsonConvert.SerializeObject(new byte[] { 1, 2, 3 }),
        JsonConvert.SerializeObject(result));
}
// This function gets triggered/executed when a new message is written
// to an Azure queue called profileRequestQueue.
public static async Task ProcessQueueMessageAsyncCancellationToken(
    [QueueTrigger("profileRequestQueue")] BlobInfo blobName,
    [Blob("profiles/{BLOBName}", FileAccess.Read)] Stream blobInput,
    [Blob("profiles/{BlobNameWithNoExtension}_Profile.docx")] CloudBlockBlob blobOutput,
    CancellationToken token)
{
    Console.WriteLine("In Web Job");
    using (Stream output = blobOutput.OpenWrite())
    {
        await CompressProfile(blobInput, output);

        // Update the table information
        UpdateEntity(GetEntity(blobName.Profession, blobName.ProfileId.ToString()), output.Length);
    }
}
public static async Task DeleteImagesAsync(
    [QueueTrigger("deleterequest")] BlobInfo blobInfo,
    [Blob(BlobInfo.ImageNameLg)] CloudBlockBlob blobLarge,
    [Blob(BlobInfo.ImageNameXs)] CloudBlockBlob blobExtraSmall,
    [Blob(BlobInfo.ImageNameSm)] CloudBlockBlob blobSmall,
    [Blob(BlobInfo.ImageNameMd)] CloudBlockBlob blobMedium)
{
    await blobExtraSmall.DeleteAsync();
    await blobSmall.DeleteAsync();
    await blobMedium.DeleteAsync();
    await blobLarge.DeleteAsync();
}
/// <summary>
/// Gets the blob info, reusing a cached copy when the UID has not changed.
/// </summary>
/// <returns>The <see cref="BlobInfo"/>, or null when no blob UID is set.</returns>
public BlobInfo GetBlobInfo()
{
    if (!this.BlobUid.HasValue)
    {
        return null;
    }

    if (_cacheBlobInfo != null && _cacheBlobInfo.Uid == this.BlobUid)
    {
        return _cacheBlobInfo;
    }

    _cacheBlobInfo = BlobStorage.Providers[this.BlobStorageProvider].GetInfo(this.BlobUid.Value);
    return _cacheBlobInfo;
}
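// Self-contained sketch of the "cache the last lookup" memoization pattern that
// GetBlobInfo above uses; CachedLookup and the demo fetch function are illustrative
// assumptions, not types from the source.
using System;

class CachedLookup<TKey, TValue> where TKey : struct
{
    private readonly Func<TKey, TValue> _fetch;
    private TKey? _cachedKey;
    private TValue _cachedValue;

    public CachedLookup(Func<TKey, TValue> fetch) => _fetch = fetch;

    public TValue Get(TKey key)
    {
        // Reuse the previous result while the key is unchanged
        if (_cachedKey.HasValue && _cachedKey.Value.Equals(key))
        {
            return _cachedValue;
        }
        _cachedKey = key;
        _cachedValue = _fetch(key);
        return _cachedValue;
    }
}

static class CachedLookupDemo
{
    static void Main()
    {
        var lookup = new CachedLookup<Guid, string>(uid => $"fetched {uid}"); // stands in for provider.GetInfo(uid)
        var uid = Guid.NewGuid();
        Console.WriteLine(lookup.Get(uid)); // fetched from the backing store
        Console.WriteLine(lookup.Get(uid)); // served from the cached copy
    }
}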
/// <summary>
/// Called after the stream has been committed.
/// </summary>
/// <param name="newBlobInfo">The new blob info.</param>
/// <param name="oldBlobInfo">The old blob info.</param>
public override void StreamCommited(BlobInfo newBlobInfo, BlobInfo oldBlobInfo)
{
    if (newBlobInfo == null)
    {
        throw new ArgumentNullException(nameof(newBlobInfo));
    }
    if (newBlobInfo.ContentType == null)
    {
        throw new ArgumentException("ContentType must not be null", nameof(newBlobInfo));
    }
    if (newBlobInfo.FileName == null)
    {
        throw new ArgumentException("FileName must not be null", nameof(newBlobInfo));
    }

    BlobStorage.RaiseCommitingEvent(newBlobInfo, oldBlobInfo);

    /*
     * // Copy an object to itself with updated metadata
     * CopyObjectRequest request = new CopyObjectRequest(_Service, this.BucketName, newBlobInfo.Uid.ToString());
     * request.Metadata["FileName"] = newBlobInfo.FileName;
     * request.ContentType = newBlobInfo.ContentType;
     * request.ContentDisposition = String.Format("filename={0}", newBlobInfo.FileName);
     *
     * using (CopyObjectResponse response = request.GetResponse())
     * {
     * }
     */

    BlobStorage.RaiseCommitedEvent(newBlobInfo, oldBlobInfo);
}
/// <summary>
/// Get a <see cref="Uri"/> for the specified blob name thumb.
/// </summary>
/// <param name="blobName">The blob to get the thumb for.</param>
/// <param name="label">The friendly label for SEO and presentation-related functionality.</param>
/// <param name="cSubdomain">Indicates whether cloudinary should cycle through different subdomains for performance.</param>
/// <param name="transformation">The transformations to apply to the resulting thumb.</param>
/// <returns>A <see cref="Uri"/> instance for the thumb.</returns>
public Uri GetThumbEndpoint(string blobName, string label, bool cSubdomain = true, Transformation transformation = null)
{
    var blobInfo = BlobInfo.FromName(blobName);
    if (blobInfo == null || transformation == null)
    {
        return null;
    }

    if (blobInfo.ContentType.Contains("video"))
    {
        return new Uri(_client.Api.UrlVideoUp.CSubDomain(cSubdomain).Transform(transformation).BuildUrl(blobInfo.GetThumbnailName()), UriKind.Absolute);
    }

    return new Uri(_client.Api.UrlImgUp.CSubDomain(cSubdomain).Transform(transformation).BuildUrl(blobInfo.GetThumbnailName()), UriKind.Absolute);
}
/// <summary>
/// Download blob payload
/// </summary>
/// <param name="entity">Blob info entity</param>
/// <param name="stream">Download stream</param>
/// <returns>Task</returns>
public async Task DownloadAsync(BlobInfo entity, Stream stream)
{
    var request = new GetObjectRequest
    {
        BucketName = bucketName,
        Key = entity.Id.ToString(),
    };

    using (GetObjectResponse response = await s3client.GetObjectAsync(request))
    {
        if (response.ResponseStream != null)
        {
            // Use the S3-reported content length; the response stream is not seekable
            entity.Size = response.ContentLength;
            await response.ResponseStream.CopyToAsync(stream);
        }
    }
}
public static ContentFile ToContentModel(this BlobInfo blobInfo)
{
    if (blobInfo == null)
    {
        throw new ArgumentNullException(nameof(blobInfo));
    }

    var retVal = new ContentFile();
    retVal.InjectFrom(blobInfo);

    retVal.Name = blobInfo.FileName;
    retVal.MimeType = blobInfo.ContentType;
    retVal.Size = blobInfo.Size.ToString();
    retVal.ModifiedDate = blobInfo.ModifiedDate;

    return retVal;
}
public void Upload_And_Download()
{
    var r = new AzureBlobRepository(GetCloudBlobContainer());
    var blob = new BlobInfo($"AzureBlobRepositoryTest/Upload_And_Download/{Guid.NewGuid()}");
    var payload = "payload";

    Upload(blob, payload, r);
    var result = Download(blob.Id, r);

    if (!useMockContainer)
    {
        Assert.Equal(payload, result);
    }

    r.Delete(blob);
}
/// <summary>
/// Writes a stream to the blob.
/// </summary>
/// <param name="blobName">Virtual blob name.</param>
/// <param name="blobStream">The <see cref="Stream"/> with data to be written to the blob.</param>
public virtual void WriteStream(string blobName, Stream blobStream)
{
    blobName = blobName ?? throw new ArgumentNullException(nameof(blobName));
    blobStream = blobStream ?? throw new ArgumentNullException(nameof(blobStream));

    var blobInfo = BlobInfo.FromName(blobName);
    if (blobInfo == null)
    {
        throw new ArgumentException("The blob name could not be resolved.", nameof(blobName));
    }

    var blockBlob = GetBlobReference(blobName);
    blockBlob.UploadFromStream(blobStream);
}
private Uri BuildUriBase64(BlobInfo blobInfo, string label, int? width = null, int? height = null, string bgColor = null, string fgColor = null)
{
    var dynamicTextImage = new DynamicTextImage(
        width ?? 500,
        height ?? 500,
        bgColor ?? _configs.DefaultThumbBackgroundHexColor,
        fgColor ?? _configs.DefaultThumbForegroundHexColor);

    using (var imageStream = dynamicTextImage.Build(label))
    {
        var encodedData = Convert.ToBase64String(imageStream.ToArray());
        var embeddedUri = $"data:{blobInfo.ContentType};base64,{encodedData}";
        return new Uri(embeddedUri, UriKind.Absolute);
    }
}
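// Self-contained sketch of the data-URI technique BuildUriBase64 relies on:
// any byte payload can be embedded as "data:<content-type>;base64,<payload>".
// The PNG magic bytes below are only a stand-in for a real image stream.
using System;

static class DataUriSketch
{
    public static Uri ToDataUri(byte[] bytes, string contentType)
    {
        var encoded = Convert.ToBase64String(bytes);
        return new Uri($"data:{contentType};base64,{encoded}", UriKind.Absolute);
    }

    static void Main()
    {
        var fakePng = new byte[] { 0x89, 0x50, 0x4E, 0x47 }; // PNG magic bytes as sample data
        Console.WriteLine(ToDataUri(fakePng, "image/png"));  // data:image/png;base64,iVBORw==
    }
}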
/// <summary>
/// Search folders and blobs in a folder
/// </summary>
/// <param name="folderUrl">Absolute or relative path</param>
/// <param name="keyword">Optional search keyword</param>
/// <returns>The search result with the found folders and blobs</returns>
public virtual BlobSearchResult Search(string folderUrl, string keyword)
{
    var retVal = new BlobSearchResult();
    folderUrl = folderUrl ?? _basePublicUrl;

    var storageFolderPath = GetStoragePathFromUrl(folderUrl);
    ValidatePath(storageFolderPath);

    if (!Directory.Exists(storageFolderPath))
    {
        return retVal;
    }

    var directories = string.IsNullOrEmpty(keyword)
        ? Directory.GetDirectories(storageFolderPath)
        : Directory.GetDirectories(storageFolderPath, "*" + keyword + "*", SearchOption.AllDirectories);

    foreach (var directory in directories)
    {
        var directoryInfo = new DirectoryInfo(directory);
        var folder = new BlobFolder
        {
            Name = Path.GetFileName(directory),
            Url = GetAbsoluteUrlFromPath(directory),
            ParentUrl = GetAbsoluteUrlFromPath(directoryInfo.Parent.FullName)
        };
        folder.RelativeUrl = GetRelativeUrl(folder.Url);
        retVal.Folders.Add(folder);
    }

    var files = string.IsNullOrEmpty(keyword)
        ? Directory.GetFiles(storageFolderPath)
        : Directory.GetFiles(storageFolderPath, "*" + keyword + "*.*", SearchOption.AllDirectories);

    foreach (var file in files)
    {
        var fileInfo = new FileInfo(file);
        var blobInfo = new BlobInfo
        {
            Url = GetAbsoluteUrlFromPath(file),
            ContentType = MimeTypeResolver.ResolveContentType(fileInfo.Name),
            Size = fileInfo.Length,
            FileName = fileInfo.Name,
            ModifiedDate = fileInfo.LastWriteTimeUtc
        };
        blobInfo.RelativeUrl = GetRelativeUrl(blobInfo.Url);
        retVal.Items.Add(blobInfo);
    }

    return retVal;
}
public void FindSingleOutputBlob()
{
    var blobInfoA = new BlobInfo(PrimaryKeyUtils.Generate<Guid>(), "a.txt");
    var blobInfoB = new BlobInfo(PrimaryKeyUtils.Generate<Guid>(), "b.txt");
    var actorInfo = new ActorInfo() { Outputs = new[] { blobInfoA, blobInfoB } };

    Assert.True(blobInfoA == actorInfo.FindSingleOutputBlob("a.txt"));
    Assert.True(blobInfoB == actorInfo.FindSingleOutputBlob("b.txt"));
    Assert.Throws<KeyNotFoundException>(() => actorInfo.FindSingleOutputBlob("c.txt"));
}
public static webModels.Page ToPageWebModel(this BlobInfo item)
{
    var retVal = new webModels.Page();
    retVal.Name = item.FileName;

    // Get the language from the file name; expected format: name.lang.extension
    var fileNameParts = item.FileName.Split('.');
    if (fileNameParts.Length == 3)
    {
        retVal.Language = fileNameParts[1];
        retVal.Name = fileNameParts[0] + "." + fileNameParts[2];
    }

    retVal.ContentType = item.ContentType;
    retVal.ModifiedDate = item.ModifiedDate ?? default(DateTime);

    return retVal;
}
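// Quick self-contained check of the name.lang.extension convention that
// ToPageWebModel parses above; the sample file names are made up.
using System;

static class PageNameSketch
{
    static void Main()
    {
        foreach (var fileName in new[] { "about-us.en.md", "readme.md" })
        {
            var parts = fileName.Split('.');
            if (parts.Length == 3)
            {
                // "about-us.en.md" -> name "about-us.md", language "en"
                Console.WriteLine($"{parts[0]}.{parts[2]} (language: {parts[1]})");
            }
            else
            {
                // No language segment; the name is used as-is
                Console.WriteLine($"{fileName} (no language)");
            }
        }
    }
}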
public void FindOutputBlob()
{
    var blobInfoA = new BlobInfo(PrimaryKeyUtils.Generate<Guid>(), "a.txt");
    var blobInfoB = new BlobInfo(PrimaryKeyUtils.Generate<Guid>(), "b.txt");
    var actorInfo = new ActorInfo() { Outputs = new[] { blobInfoA, blobInfoB } };

    Assert.True(blobInfoA == actorInfo.FindOutputBlob("a.txt"));
    Assert.True(blobInfoB == actorInfo.FindOutputBlob("b.txt"));
    Assert.True(null == actorInfo.FindOutputBlob("c.txt"));
}
public static bool mainActivity(
    [ActivityTrigger] BlobInfo info,
    ILogger log,
    [Blob("test1/{info.filename}", FileAccess.Read, Connection = "AzureWebJobsStorage")] Stream InStream,
    [Blob("test1/output-{info.filename}", FileAccess.Write, Connection = "AzureWebJobsStorage")] Stream OutStream)
{
    // Create the pipe and JSON blob stream
    IEnumerable<Row> humans = JsonReaderExtensions.convertToJsonIterable(InStream);
    // IEnumerable<Pipe> pipeline = JsonReaderExtensions.convertToJsonIterable(info.pipeline);

    Func<Row, object> mapFunction = value =>
    {
        var obj = new
        {
            name = value.name,
            age = value.age + 10,
            eyeColor = value.eyeColor
        };
        return obj;
    };

    Func<dynamic, bool> filterPredicate = value => value.eyeColor == "green";

    Func<IEnumerable, IEnumerable> pipeline = Activities.pipelineMaker(
        Activities.mapMaker<Row, dynamic>(mapFunction),
        Activities.eachMaker(),
        Activities.filterMaker(filterPredicate));

    // Prepare the output stream writer
    using (JsonTextWriter wr = JsonReaderExtensions.InitJsonOutStream(OutStream))
    {
        wr.WriteStartArray();
        foreach (var h in pipeline(humans))
        {
            wr.SerialiseJsonToStream<dynamic>(h);
        }
        wr.WriteEndArray();
    }

    return true;
}
public IAsyncAction UploadAsync(IRecord record, string contentType, IInputStream stream)
{
    if (record == null)
    {
        throw new ArgumentNullException(nameof(record));
    }

    return AsyncInfo.Run(cancelToken => Task.Run(async () =>
    {
        if (string.IsNullOrEmpty(contentType))
        {
            contentType = HttpStreamer.OctetStreamMimeType;
        }

        var blobInfo = new BlobInfo(contentType);
        await record.PutBlobInItem(Item, blobInfo, stream).AsTask(cancelToken);
    }));
}
public void Upload_And_Download()
{
    using (var s3client = GetAWSS3Client())
    {
        var r = new AWSS3Repository(s3client, bucketName);
        var blob = new BlobInfo($"AWSS3RepositoryTest/Upload_And_Download/{Guid.NewGuid()}");
        var payload = "payload";

        Upload(blob, payload, r);
        var result = Download(blob.Id, r);

        if (!useMockContainer)
        {
            Assert.Equal(payload, result);
        }

        r.Delete(blob);
    }
}
private async Task<bool> IsCsvFile(BlobInfo blob)
{
    await using var mimeTypeStream = await _fileStorageService.StreamBlob(blob);
    var hasMatchingMimeType = await _fileTypeService.HasMatchingMimeType(
        mimeTypeStream,
        AllowedMimeTypesByFileType[FileType.Data]);

    if (!hasMatchingMimeType)
    {
        return false;
    }

    await using var encodingStream = await _fileStorageService.StreamBlob(blob);
    return _fileTypeService.HasMatchingEncodingType(encodingStream, CsvEncodingTypes);
}
public async Task<BlobInfo> UpdateBlobAsync(string blobId, string blobName, string blobType, Stream blobData)
{
    var start = DateTime.Now;

    try
    {
        if (string.IsNullOrEmpty(blobId))
        {
            throw new ArgumentNullException(nameof(blobId));
        }
        if (blobData == null)
        {
            throw new ArgumentNullException(nameof(blobData));
        }

        if (string.IsNullOrEmpty(blobType))
        {
            blobType = _mimeTypeResolver.GetMimeType(blobName);
        }

        var blobInfo = new BlobInfo
        {
            Id = blobId,
            Name = blobName,
            Type = blobType,
            Size = blobData.Length,
            Time = DateTime.UtcNow
        };

        WriteBlobInfo(blobId, blobInfo);
        await WriteBlobDataAsync(blobId, blobData);

        _performanceLog.Log("UpdateBlob", start);

        return blobInfo;
    }
    catch (Exception e)
    {
        _performanceLog.Log("UpdateBlob", start, e);
        throw;
    }
}
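// Minimal sketch of the success/failure timing pattern UpdateBlobAsync follows;
// PerfLogSketch and its console output are illustrations, not the source's _performanceLog.
using System;
using System.Diagnostics;

static class PerfLogSketch
{
    static void LogTimed(string operation, Action action)
    {
        var sw = Stopwatch.StartNew();
        try
        {
            action();
            Console.WriteLine($"{operation} succeeded in {sw.ElapsedMilliseconds} ms");
        }
        catch (Exception e)
        {
            // Log the failure with the elapsed time, then rethrow so callers still see the error
            Console.WriteLine($"{operation} failed after {sw.ElapsedMilliseconds} ms: {e.Message}");
            throw;
        }
    }

    static void Main() => LogTimed("UpdateBlob", () => { /* work goes here */ });
}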
private RunActorParam PrepareDataRangeValidatorRunParams()
{
    var range = InitialBlobs.FirstOrDefault(x => x.Name.StartsWith("Range"));
    if (range == null)
    {
        return null;
    }

    var rangeBlob = new BlobInfo(range.Id, range.Name.Replace("Range", "Main"));

    var originData = InitialBlobs.FindSingleBlob("data.txt");
    var dataBlob = new BlobInfo(originData.Id, "stdin.txt");

    return new RunActorParam(
        "HackRunActor",
        new BlobInfo[] { rangeBlob, dataBlob, InitialBlobs.FindSingleBlob("limit.json") },
        "ValidateData");
}
/// <summary>
/// Reads the stream.
/// </summary>
/// <param name="blobInfo">The blob info.</param>
/// <returns>The response stream for the blob contents.</returns>
public override System.IO.Stream ReadStream(BlobInfo blobInfo)
{
    Stream stream = null;
    try
    {
        BlobStorage.RaiseReadingEvent(blobInfo);

        GetObjectRequest request = new GetObjectRequest(_Service, this.BucketName, blobInfo.Uid.ToString(), false);
        GetObjectResponse response = request.GetResponse();
        stream = response.GetResponseStream();

        BlobStorage.RaiseReadedEvent(blobInfo);
    }
    catch (Exception exception)
    {
        Trace.WriteLine(exception);
        throw;
    }

    return stream;
}
/// <summary>
/// Get blob info by URL
/// </summary>
/// <param name="blobUrl">Absolute or relative blob URL</param>
/// <returns>The <see cref="BlobInfo"/>, or null when the blob does not exist</returns>
public virtual BlobInfo GetBlobInfo(string blobUrl)
{
    if (string.IsNullOrEmpty(blobUrl))
    {
        throw new ArgumentNullException(nameof(blobUrl));
    }

    var uri = blobUrl.IsAbsoluteUrl() ? new Uri(blobUrl) : new Uri(_cloudBlobClient.BaseUri, blobUrl.TrimStart('/'));
    BlobInfo retVal = null;
    try
    {
        var cloudBlob = _cloudBlobClient.GetBlobReferenceFromServer(uri);
        retVal = ConvertBlobToBlobInfo(cloudBlob);
    }
    catch (Exception)
    {
        // The Azure blob storage client offers no way to check whether a blob URL exists without throwing
    }

    return retVal;
}
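// Small self-contained check of the absolute-vs-relative URL handling that
// GetBlobInfo above performs; the base address and sample URLs are made up.
using System;

static class UriResolveSketch
{
    static void Main()
    {
        var baseUri = new Uri("https://example.blob.core.windows.net/");
        foreach (var blobUrl in new[] { "https://cdn.example.com/a.png", "/container/b.png" })
        {
            var uri = Uri.IsWellFormedUriString(blobUrl, UriKind.Absolute)
                ? new Uri(blobUrl)
                : new Uri(baseUri, blobUrl.TrimStart('/'));

            // Absolute URLs pass through; relative ones resolve against the base
            Console.WriteLine(uri);
        }
    }
}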
private void FindUnusedRobotPieces()
{
    ImageProcessor imageProcessor = m_MainModel.ImageProcessor;
    List<BlobInfo> blobInfos = m_MainModel.BoardImageModel.FindUnusedRobotPieces(imageProcessor.BlackAndWhiteImage);
    imageProcessor.DrawUnusedRobotPieces(blobInfos);

    if (blobInfos.Count > 0)
    {
        m_PickupPieceInfo = blobInfos[0];
    }
}
protected void Page_Load(object sender, EventArgs e)
{
    string hash = QueryHelper.GetString("hash", string.Empty);
    string path = QueryHelper.GetString("path", string.Empty);

    // Validate the hash
    if (ValidationHelper.ValidateHash("?path=" + URLHelper.EscapeSpecialCharacters(path), hash, false))
    {
        if (path.StartsWithCSafe("~"))
        {
            path = Server.MapPath(path);
        }

        // Get the file content from the blob
        BlobInfo bi = new BlobInfo(path);

        // Check if the blob exists
        if (BlobInfoProvider.BlobExists(bi))
        {
            // Clear the response
            CookieHelper.ClearResponseCookies();
            Response.Clear();

            // Set the revalidation
            SetRevalidation();

            string etag = bi.ETag;
            DateTime lastModified = ValidationHelper.GetDateTime(bi.GetMetadata(ContainerInfoProvider.LAST_WRITE_TIME), DateTimeHelper.ZERO_TIME);

            // Set the correct response content type
            SetResponseContentType(path);

            // Client caching - only on the live site
            if (AllowCache && AllowClientCache && ETagsMatch(etag, lastModified))
            {
                // Set the file time stamps to allow client caching
                SetTimeStamps(lastModified);
                RespondNotModified(etag);
                return;
            }

            Stream stream = BlobInfoProvider.GetBlobContent(bi);
            SetDisposition(Path.GetFileName(path), Path.GetExtension(path));

            // Set up the ETag property
            ETag = etag;

            if (AllowCache)
            {
                // Set the file time stamps to allow client caching
                SetTimeStamps(lastModified);
                Response.Cache.SetETag(etag);
            }
            else
            {
                SetCacheability();
            }

            // Send the headers
            Response.Flush();

            Byte[] buffer = new Byte[StorageHelper.BUFFER_SIZE];
            int bytesRead = stream.Read(buffer, 0, StorageHelper.BUFFER_SIZE);

            // Copy data from the blob stream to the response output
            while (bytesRead > 0)
            {
                // Write the data to the current output stream
                Response.OutputStream.Write(buffer, 0, bytesRead);

                // Flush the data to the output
                Response.Flush();

                // Read the next chunk of data
                bytesRead = stream.Read(buffer, 0, StorageHelper.BUFFER_SIZE);
            }

            stream.Close();
            CompleteRequest();
        }
        else
        {
            NotFound();
        }
    }
    else
    {
        URLHelper.Redirect(ResolveUrl("~/CMSMessages/Error.aspx?title=" + ResHelper.GetString("general.badhashtitle") + "&text=" + ResHelper.GetString("general.badhashtext")));
    }
}
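// Self-contained sketch of the buffered read/write loop Page_Load uses to stream
// the blob to the response; the 4 KB buffer size is an arbitrary choice, and
// Stream.CopyTo does the same job in one call when no per-chunk work is needed.
using System;
using System.IO;

static class BufferedCopySketch
{
    public static void Copy(Stream source, Stream destination)
    {
        var buffer = new byte[4096];
        int bytesRead;

        // Read a chunk, write it out, repeat until the source is exhausted
        while ((bytesRead = source.Read(buffer, 0, buffer.Length)) > 0)
        {
            destination.Write(buffer, 0, bytesRead);
        }
    }

    static void Main()
    {
        using var source = new MemoryStream(new byte[] { 1, 2, 3 });
        using var destination = new MemoryStream();
        Copy(source, destination);
        Console.WriteLine(destination.Length); // 3
    }
}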
internal List<BlobInfo> FindUnusedRobotPieces(Image<Gray, Byte> image)
{
    List<BlobInfo> blobInfos = new List<BlobInfo>();

    using (MemStorage storage = new MemStorage()) // allocate storage for contour approximation
    {
        for (Contour<Point> contours = image.FindContours(
                 Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                 Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST,
                 storage);
             contours != null;
             contours = contours.HNext)
        {
            Contour<Point> currentContour = contours.ApproxPoly(contours.Perimeter * 0.05, storage);
            BlobInfo blobInfo = new BlobInfo(currentContour);
            Debug.WriteLine("BlobInfo: " + blobInfo.Area + ", center: " + blobInfo.CameraCenter);

            // We are only interested in the blobs that are left of the grid
            if (blobInfo.CameraCenter.X + Math.Sqrt(blobInfo.Area) / 2 <
                m_OutsideRectangle.center.X - m_OutsideRectangle.size.Width / 2 - 10)
            {
                blobInfos.Add(blobInfo);
            }
        }
    }

    return blobInfos;
}
private QState RobotsTurn(IQEvent qEvent)
{
    LogEvent(MethodBase.GetCurrentMethod().Name, qEvent);

    switch (qEvent.QSignal)
    {
        case (int)QSignals.Entry:
            int bestMoveIndex = m_MainModel.BoardManager.GetBestMove();
            m_BestMove = new Location(bestMoveIndex);
            m_MainModel.NextMove = m_BestMove.ToString();
            return null;

        case (int)ControllerSignal.ProcessFrame:
            this.Dispatch(new ControllerEvent(ControllerSignal.FindUnusedPiece));
            return null;

        case (int)ControllerSignal.FindUnusedPiece:
            FindUnusedRobotPieces();
            if (m_PickupPieceInfo == null)
            {
                TransitionTo(this.NoPieceFound);
            }
            else
            {
                TransitionTo(this.MovingPiece);
            }
            return null;

        case (int)ControllerSignal.RobotMoveComplete:
            // Check that the move was successful
            if (m_MainModel.BoardImageModel.IsFilled(m_BestMove.X, m_BestMove.Y, m_MainModel.ImageProcessor.BlackAndWhiteImage))
            {
                // The move succeeded
                m_MainModel.BoardManager.MakeMove(m_BestMove.X, m_BestMove.Y);
                if (m_MainModel.BoardManager.Board.GameIsOver)
                {
                    HandleGameOver();
                }
                else
                {
                    TransitionTo(this.WaitingForHuman);
                }
            }
            else
            {
                // Try again
                this.Dispatch(new ControllerEvent(ControllerSignal.FindUnusedPiece));
            }
            return null;

        case (int)QSignals.Exit:
            m_PickupPieceInfo = null;
            return null;
    }

    return this.Initialized;
}