Code example #1
		public virtual Face[] GetRecognizedFaces()
		{
			sbyte[] bytes = GetByteArray(TagFaceRecognitionInfo);
			if (bytes == null)
			{
				return null;
			}
			RandomAccessReader reader = new ByteArrayReader(bytes);
			reader.SetMotorolaByteOrder(false);
			try
			{
				int faceCount = reader.GetUInt16(0);
				if (faceCount == 0)
				{
					return null;
				}
				Face[] faces = new Face[faceCount];
				for (int i = 0; i < faceCount; i++)
				{
					// Records are laid out from offset 4 with a 44-byte stride:
					// a 20-byte ASCII name, then x, y, width and height as
					// little-endian UInt16 values at +20..+26, then a 20-byte
					// ASCII age string at +28.
					int offset = 4 + i * 44;
					string name = Sharpen.Extensions.Trim(reader.GetString(offset, 20, "ASCII"));
					string age = Sharpen.Extensions.Trim(reader.GetString(offset + 28, 20, "ASCII"));
					faces[i] = new Face(reader.GetUInt16(offset + 20), reader.GetUInt16(offset + 22), reader.GetUInt16(offset + 24), reader.GetUInt16(offset + 26), name, Age.FromPanasonicString(age));
				}
				return faces;
			}
			catch (IOException)
			{
				return null;
			}
		}
Code example #2
 public virtual void ProcessChunk([NotNull] string fourCC, [NotNull] sbyte[] payload)
 {
     //        System.out.println("Chunk " + fourCC + " " + payload.length + " bytes");
     if (fourCC.Equals("EXIF"))
     {
         new ExifReader().Extract(new ByteArrayReader(payload), _metadata);
     }
     else
     {
         if (fourCC.Equals("ICCP"))
         {
             new IccReader().Extract(new ByteArrayReader(payload), _metadata);
         }
         else
         {
             if (fourCC.Equals("XMP "))
             {
                 new XmpReader().Extract(payload, _metadata);
             }
             else
             {
                 if (fourCC.Equals("VP8X") && payload.Length == 10)
                 {
                     RandomAccessReader reader = new ByteArrayReader(payload);
                     reader.SetMotorolaByteOrder(false);
                     try
                     {
                         // Flags
                         //                boolean hasFragments = reader.getBit(0);
                         bool isAnimation = reader.GetBit(1);
                         //                boolean hasXmp = reader.getBit(2);
                         //                boolean hasExif = reader.getBit(3);
                         bool hasAlpha = reader.GetBit(4);
                         //                boolean hasIcc = reader.getBit(5);
                         // Image size
                         int widthMinusOne = reader.GetInt24(4);
                         int heightMinusOne = reader.GetInt24(7);
                         WebpDirectory directory = new WebpDirectory();
                         directory.SetInt(WebpDirectory.TagImageWidth, widthMinusOne + 1);
                         directory.SetInt(WebpDirectory.TagImageHeight, heightMinusOne + 1);
                         directory.SetBoolean(WebpDirectory.TagHasAlpha, hasAlpha);
                         directory.SetBoolean(WebpDirectory.TagIsAnimation, isAnimation);
                         _metadata.AddDirectory(directory);
                     }
                     catch (IOException e)
                     {
                         Sharpen.Runtime.PrintStackTrace(e, System.Console.Error);
                     }
                 }
             }
         }
     }
 }
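
For reference, here is a minimal sketch of the VP8X decoding path above, run against a hand-built 10-byte payload: the flags byte sits at offset 0, and the image size is stored as (width - 1) and (height - 1) in little-endian 24-bit fields at offsets 4 and 7. The class and method names are hypothetical, the using directive for RandomAccessReader/ByteArrayReader is omitted because its namespace depends on the port, and the expected flag values assume GetBit indexes bits from the least-significant bit of each byte.

 public static class Vp8xSketch
 {
     // Hypothetical demo, not part of the library: builds a VP8X payload by hand
     // and reads it back with the same calls ProcessChunk uses.
     public static void Main()
     {
         sbyte[] payload = new sbyte[10];
         payload[0] = 0x12;                     // flags: bit 1 (animation) and bit 4 (alpha) set
         payload[4] = 0x1F;                     // widthMinusOne  = 799 -> width 800
         payload[5] = 0x03;
         payload[7] = 0x57;                     // heightMinusOne = 599 -> height 600
         payload[8] = 0x02;
         RandomAccessReader reader = new ByteArrayReader(payload);
         reader.SetMotorolaByteOrder(false);    // VP8X fields are little-endian
         bool isAnimation = reader.GetBit(1);   // expected: true
         bool hasAlpha = reader.GetBit(4);      // expected: true
         int width = reader.GetInt24(4) + 1;    // expected: 800
         int height = reader.GetInt24(7) + 1;   // expected: 600
         System.Console.Out.WriteLine(width + "x" + height + ", alpha=" + hasAlpha + ", animation=" + isAnimation);
     }
 }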
Code example #3
		public virtual Face[] GetDetectedFaces()
		{
			sbyte[] bytes = GetByteArray(TagFaceDetectionInfo);
			if (bytes == null)
			{
				return null;
			}
			RandomAccessReader reader = new ByteArrayReader(bytes);
			reader.SetMotorolaByteOrder(false);
			try
			{
				int faceCount = reader.GetUInt16(0);
				if (faceCount == 0)
				{
					return null;
				}
				Face[] faces = new Face[faceCount];
				for (int i = 0; i < faceCount; i++)
				{
					// Each record is 8 bytes from offset 2: x, y, width and height
					// as little-endian UInt16 values.
					int offset = 2 + i * 8;
					faces[i] = new Face(reader.GetUInt16(offset), reader.GetUInt16(offset + 2), reader.GetUInt16(offset + 4), reader.GetUInt16(offset + 6), null, null);
				}
				return faces;
			}
			catch (IOException)
			{
				return null;
			}
		}
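
Unlike the recognition records in code example #1, the detection records carry no name or age: just four little-endian UInt16 coordinates per face, packed 8 bytes apart starting at offset 2. Below is a minimal sketch of that layout, built and read back with the same reader calls; the class and method names are hypothetical and the using directive for the reader types is omitted because its namespace depends on the port.

		public static class FaceDetectionSketch
		{
			// Hypothetical demo, not part of the library: builds a buffer holding a
			// single 8-byte detection record and reads it back as GetDetectedFaces does.
			public static void Main()
			{
				sbyte[] data = new sbyte[10];
				data[0] = 1;                          // face count (UInt16, little-endian)
				data[2] = 100;                        // x
				data[4] = 50;                         // y
				data[6] = 40;                         // width
				data[8] = 30;                         // height
				RandomAccessReader reader = new ByteArrayReader(data);
				reader.SetMotorolaByteOrder(false);
				int faceCount = reader.GetUInt16(0);  // expected: 1
				int x = reader.GetUInt16(2);          // expected: 100
				int y = reader.GetUInt16(4);          // expected: 50
				int width = reader.GetUInt16(6);      // expected: 40
				int height = reader.GetUInt16(8);     // expected: 30
				System.Console.Out.WriteLine(faceCount + " face at (" + x + "," + y + "), " + width + "x" + height);
			}
		}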