Example #1
 static void CompileFcToQd(string sourcePath)
 {
     QDeterminant = new QDet();
     var FCconverter = Manufactory.CreateFlowChartConverter(ConverterTypes.JSON);
     FCconverter.ParseDocument(sourcePath);
     var actionList = new AList(FCconverter.GetBlocks(), FCconverter.GetLinks(), Opertaions);
     QDeterminant = actionList.getqdet();
     var result = new StringBuilder("");
     if (QDeterminant.QDeterminant.Count > 0)
     {
         result.Append("{");
         foreach (var qterm in QDeterminant.QDeterminant)
         {
             result.Append("(");
             if (!String.IsNullOrEmpty(qterm.Logical))
             {
                 result.Append(qterm.Logical).Append(",");
             }
             result.Append(qterm.Definitive).Append(");");
         }
         result.Remove(result.Length - 1, 1).Append("}");
     }
     Console.WriteLine("Save QD");
     File.WriteAllText(Path.GetDirectoryName(sourcePath)+@"\Qdeterminant.qd",result.ToString());
 }
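For reference, the loop above serializes each term as "(Logical,Definitive)" or "(Definitive)", joins the terms with ";" and wraps the result in braces. With two hypothetical terms, one carrying a Logical part and one without, Qdeterminant.qd would contain a single line of this shape:

 // hypothetical terms: (Logical = "a", Definitive = "x") and (Logical = null, Definitive = "y")
 // Qdeterminant.qd -> {(a,x);(y)}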
Example #2
		public override IList<X509Crl> GetCRLsFromSignature()
		{
			IList<X509Crl> list = new AList<X509Crl>();
			try
			{
				// Add CRLs contained in the SignedData
                foreach (X509Crl crl in cmsSignedData.GetCrls
					("Collection").GetMatches(null))
				{					
					list.AddItem(crl);
				}
				// Add CRLs from the CAdES-XL revocation-values unsigned attribute inside SignerInfo, if present
				SignerInformation si = cmsSignedData.GetSignerInfos().GetFirstSigner(signerId);
				if (si != null && si.UnsignedAttributes != null && si.UnsignedAttributes[PkcsObjectIdentifiers.IdAAEtsRevocationValues] != null)
				{
					RevocationValues revValues = RevocationValues.GetInstance(si.UnsignedAttributes[PkcsObjectIdentifiers.IdAAEtsRevocationValues].AttrValues[0]);
					foreach (CertificateList crlObj in revValues.GetCrlVals())
					{
						X509Crl crl = new X509Crl(crlObj);
						list.AddItem(crl);
					}
				}
			}
			/*catch (StoreException e)
			{
				throw new RuntimeException(e);
			}*/
			catch (CrlException e)
			{
				throw new RuntimeException(e);
			}
			return list;
		}
 public static Query GetQuery(Database database, string listDocId)
 {
     View view = database.GetView(ViewName);
     if (view.Map == null)
     {
         view.Map += (IDictionary<string, object> document, EmitDelegate emitter)=> 
         {
             if (Task.DocType.Equals(document.Get("type")))
             {
                 var keys = new AList<object>();
                 keys.AddItem(document.Get("list_id"));
                 keys.AddItem(document.Get("created_at"));
                 emitter(keys, document);
             }
         };
     }
     Query query = view.CreateQuery();
     query.Descending = true;
     IList<object> startKeys = new AList<object>();
     startKeys.AddItem(listDocId);
     startKeys.AddItem(new Dictionary<string, object>());
     IList<object> endKeys = new AList<object>();
     endKeys.AddItem(listDocId);
     query.StartKey = startKeys;
     query.EndKey = endKeys;
     return query;
 }
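A minimal caller sketch for the query built above, assuming the Couchbase Lite 1.x Query API (Run(), QueryEnumerator and QueryRow.DocumentId are not shown in the original snippet):

 // Hypothetical usage: list the task documents of one list, newest first
 // (keys are [list_id, created_at] and Descending = true).
 Query query = GetQuery(database, listDocId);
 QueryEnumerator rows = query.Run();
 foreach (QueryRow row in rows)
 {
     Console.WriteLine(row.DocumentId);
 }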
 public PreprocessorExpressionParser(TJS tjs, string script)
     : base(script)
 {
     //	private int mResult;
     mIDs = new AList<string>();
     mTJS = tjs;
 }
Example #5
 /// <exception cref="Kirikiri.Tjs2.VariantException"></exception>
 /// <exception cref="Kirikiri.Tjs2.TJSException"></exception>
 public virtual void AssignStructure(Dispatch2 dsp, AList<Dispatch2> stack)
 {
     // assign structured data from dsp
     //ArrayNI dicni = null;
     if (dsp.GetNativeInstance(DictionaryClass.ClassID) != null)
     {
         // copy from dictionary
         stack.AddItem(dsp);
         try
         {
             CustomObject owner = mOwner.Get();
             owner.Clear();
             DictionaryNI.AssignStructCallback callback = new DictionaryNI.AssignStructCallback
                 (stack, owner);
             dsp.EnumMembers(Interface.IGNOREPROP, callback, dsp);
         }
         finally
         {
             stack.Remove(stack.Count - 1);
         }
     }
     else
     {
         throw new TJSException(Error.SpecifyDicOrArray);
     }
 }
 /// <exception cref="NBoilerpipe.BoilerpipeProcessingException"></exception>
 public bool Process(TextDocument doc)
 {
     bool changes = false;
     IList<TextBlock> blocks = doc.GetTextBlocks();
     IList<TextBlock> blocksNew = new AList<TextBlock>();
     foreach (TextBlock tb in blocks)
     {
         string text = tb.GetText();
         string[] paragraphs = text.Split("[\n\r]+");
         if (paragraphs.Length < 2)
         {
             blocksNew.AddItem(tb);
             continue;
         }
         bool isContent = tb.IsContent();
         ICollection<string> labels = tb.GetLabels();
         foreach (string p in paragraphs)
         {
             TextBlock tbP = new TextBlock(p);
             tbP.SetIsContent(isContent);
             tbP.AddLabels(labels);
             blocksNew.AddItem(tbP);
             changes = true;
         }
     }
     if (changes)
     {
         blocks.Clear();
         Sharpen.Collections.AddAll(blocks, blocksNew);
     }
     return changes;
 }
Example #7
 public virtual void LogAllCommits()
 {
     IList<RevCommit> commits = new AList<RevCommit>();
     Git git = Git.Wrap(db);
     WriteTrashFile("Test.txt", "Hello world");
     git.Add().AddFilepattern("Test.txt").Call();
     commits.AddItem(git.Commit().SetMessage("initial commit").Call());
     git.BranchCreate().SetName("branch1").Call();
     Ref checkedOut = git.Checkout().SetName("branch1").Call();
     NUnit.Framework.Assert.AreEqual("refs/heads/branch1", checkedOut.GetName());
     WriteTrashFile("Test1.txt", "Hello world!");
     git.Add().AddFilepattern("Test1.txt").Call();
     commits.AddItem(git.Commit().SetMessage("branch1 commit").Call());
     checkedOut = git.Checkout().SetName("master").Call();
     NUnit.Framework.Assert.AreEqual("refs/heads/master", checkedOut.GetName());
     WriteTrashFile("Test2.txt", "Hello world!!");
     git.Add().AddFilepattern("Test2.txt").Call();
     commits.AddItem(git.Commit().SetMessage("branch1 commit").Call());
     Iterator<RevCommit> log = git.Log().All().Call().Iterator();
     NUnit.Framework.Assert.IsTrue(log.HasNext());
     NUnit.Framework.Assert.IsTrue(commits.Contains(log.Next()));
     NUnit.Framework.Assert.IsTrue(log.HasNext());
     NUnit.Framework.Assert.IsTrue(commits.Contains(log.Next()));
     NUnit.Framework.Assert.IsTrue(log.HasNext());
     NUnit.Framework.Assert.IsTrue(commits.Contains(log.Next()));
     NUnit.Framework.Assert.IsFalse(log.HasNext());
 }
Example #8
		public virtual void TestDatabase()
		{
			Send("PUT", "/database", Status.Created, null);
			IDictionary<string, object> dbInfo = (IDictionary<string, object>)Send("GET", "/database"
				, Status.Ok, null);
			NUnit.Framework.Assert.AreEqual(0, dbInfo.Get("doc_count"));
			NUnit.Framework.Assert.AreEqual(0, dbInfo.Get("update_seq"));
			NUnit.Framework.Assert.IsTrue((int)dbInfo.Get("disk_size") > 8000);
			Send("PUT", "/database", Status.PreconditionFailed, null);
			Send("PUT", "/database2", Status.Created, null);
			IList<string> allDbs = new AList<string>();
			allDbs.AddItem("cblite-test");
			allDbs.AddItem("database");
			allDbs.AddItem("database2");
			Send("GET", "/_all_dbs", Status.Ok, allDbs);
			dbInfo = (IDictionary<string, object>)Send("GET", "/database2", Status.Ok, null);
			NUnit.Framework.Assert.AreEqual("database2", dbInfo.Get("db_name"));
			Send("DELETE", "/database2", Status.Ok, null);
			allDbs.Remove("database2");
			Send("GET", "/_all_dbs", Status.Ok, allDbs);
			Send("PUT", "/database%2Fwith%2Fslashes", Status.Created, null);
			dbInfo = (IDictionary<string, object>)Send("GET", "/database%2Fwith%2Fslashes", Status
				.Ok, null);
			NUnit.Framework.Assert.AreEqual("database/with/slashes", dbInfo.Get("db_name"));
		}
Example #9
        public virtual IList<CertificateAndContext> GetCertificateBySubjectName(X509Name
			 subjectName)
		{
			IList<CertificateAndContext> list = new AList<CertificateAndContext>();
			try
			{
				string url = GetAccessLocation(certificate, X509ObjectIdentifiers.IdADCAIssuers);
				if (url != null)
				{
                    X509CertificateParser parser = new X509CertificateParser();
                    X509Certificate cert = parser.ReadCertificate(httpDataLoader.Get(url));

					if (cert.SubjectDN.Equals(subjectName))
					{
						list.Add(new CertificateAndContext());
					}
				}
			}
			catch (CannotFetchDataException)
			{
                return new List<CertificateAndContext>();
			}
			catch (CertificateException)
			{
                return new List<CertificateAndContext>();
			}
			return list;
		}
Example #10
 public LocalSymbolList(int localCount)
 {
     //private int mStartWrite;
     //private int mCountWrite;
     mLocalCountStart = localCount;
     //mStartWrite = mCountWrite = 0;
     mList = new AList<string>();
 }
Example #11
 public void Add(Kirikiri.Tjs2.ExprNode node)
 {
     if (mNodes == null)
     {
         mNodes = new AList<Kirikiri.Tjs2.ExprNode>();
     }
     mNodes.AddItem(node);
 }
 public virtual void TestJoinIterable()
 {
     IList<string> strings = new AList<string>();
     strings.Add("A");
     strings.Add("B");
     strings.Add("C");
     Sharpen.Tests.AreEqual("A;B;C", StringUtil.Join(strings.ToCharSequence(), ";"));
     Sharpen.Tests.AreEqual(string.Empty, StringUtil.Join(new AList<string>().ToCharSequence(), ";"));
 }
Example #13
 public LexicalAnalyzer(Compiler block, string script, bool isexpr, bool resultneeded
     )
 {
     mRetValDeque = new LongQue();
     mEmbeddableExpressionDataStack = new AList<EmbeddableExpressionData>();
     mValues = new AList<object>();
     mBlock = block;
     mIsExprMode = isexpr;
     mResultNeeded = resultneeded;
     mPrevToken = -1;
     int scriptLen = script.Length;
     if (mIsExprMode)
     {
         mText = new char[scriptLen + 2];
         Sharpen.Runtime.GetCharsForString(script, 0, scriptLen, mText, 0);
         mText[scriptLen] = ';';
         mText[scriptLen + 1] = (char)0;
     }
     else
     {
         //mStream = new StringStream(script+";");
         if (script.StartsWith("#!") == true)
         {
             // Replace the leading "#!" with "//"
             mText = new char[scriptLen + 1];
             Sharpen.Runtime.GetCharsForString(script, 2, scriptLen, mText, 2);
             mText[0] = mText[1] = '/';
             mText[scriptLen] = (char)0;
         }
         else
         {
             //mStream = new StringStream( "//" + script.substring(2));
             mText = new char[scriptLen + 1];
             Sharpen.Runtime.GetCharsForString(script, 0, scriptLen, mText, 0);
             mText[scriptLen] = (char)0;
         }
     }
     //mStream = new StringStream(script);
     if (CompileState.mEnableDicFuncQuickHack)
     {
         //----- dicfunc quick-hack
         //mDicFunc = false; // default value, so no need to set it explicitly
         //if( mIsExprMode && (script.startsWith("[") == true || script.startsWith("%[") == true) ) {
         char c = script[0];
         if (mIsExprMode && (c == '[' || (c == '%' && script[1] == '[')))
         {
             mDicFunc = true;
         }
     }
     //mIfLevel = 0;
     //mPrevPos = 0;
     //mNestLevel = 0;
     mIsFirst = true;
     //mRegularExpression = false;
     //mBareWord = false;
     PutValue(null);
 }
Example #14
		/// <exception cref="System.IO.IOException"></exception>
        //private IDictionary<DerObjectIdentifier, Asn1Encodable> ExtendUnsignedAttributes(IDictionary
        //    <DerObjectIdentifier, Asn1Encodable> unsignedAttrs, X509Certificate signingCertificate
        //    , DateTime signingDate, CertificateSource optionalCertificateSource)
        private IDictionary ExtendUnsignedAttributes(IDictionary unsignedAttrs
            , X509Certificate signingCertificate, DateTime signingDate
            , CertificateSource optionalCertificateSource)
		{
			ValidationContext validationContext = certificateVerifier.ValidateCertificate(signingCertificate
				, signingDate, optionalCertificateSource, null, null);
			try
			{
				IList<X509CertificateStructure> certificateValues = new AList<X509CertificateStructure
					>();
				AList<CertificateList> crlValues = new AList<CertificateList>();
				AList<BasicOcspResponse> ocspValues = new AList<BasicOcspResponse>();
				foreach (CertificateAndContext c in validationContext.GetNeededCertificates())
				{
					if (!c.Equals(signingCertificate))
					{
                        certificateValues.AddItem(X509CertificateStructure.GetInstance(((Asn1Sequence)Asn1Object.FromByteArray
                            (c.GetCertificate().GetEncoded()))));
					}
				}
				foreach (X509Crl relatedcrl in validationContext.GetNeededCRL())
				{                    
					crlValues.AddItem(CertificateList.GetInstance((Asn1Sequence)Asn1Object.FromByteArray(((X509Crl
						)relatedcrl).GetEncoded())));
				}
				foreach (BasicOcspResp relatedocspresp in validationContext.GetNeededOCSPResp())
				{                    
					ocspValues.AddItem((BasicOcspResponse.GetInstance((Asn1Sequence)Asn1Object.FromByteArray(
						relatedocspresp.GetEncoded()))));
				}
				CertificateList[] crlValuesArray = new CertificateList[crlValues.Count];
				BasicOcspResponse[] ocspValuesArray = new BasicOcspResponse[ocspValues.Count];
				RevocationValues revocationValues = new RevocationValues(Sharpen.Collections.ToArray
					(crlValues, crlValuesArray), Sharpen.Collections.ToArray(ocspValues, ocspValuesArray
					), null);
				//unsignedAttrs.Put(PkcsObjectIdentifiers.IdAAEtsRevocationValues, new Attribute
                unsignedAttrs.Add(PkcsObjectIdentifiers.IdAAEtsRevocationValues, new BcCms.Attribute
					(PkcsObjectIdentifiers.IdAAEtsRevocationValues, new DerSet(revocationValues))
					);
				X509CertificateStructure[] certValuesArray = new X509CertificateStructure[certificateValues
					.Count];
				//unsignedAttrs.Put(PkcsObjectIdentifiers.IdAAEtsCertValues, new Attribute(PkcsObjectIdentifiers.IdAAEtsCertValues, new DerSet(new DerSequence(Sharpen.Collections.ToArray(certificateValues
                unsignedAttrs.Add(PkcsObjectIdentifiers.IdAAEtsCertValues, new BcCms.Attribute(PkcsObjectIdentifiers.IdAAEtsCertValues, new DerSet(new DerSequence(Sharpen.Collections.ToArray(certificateValues
					, certValuesArray)))));
			}
			catch (CertificateEncodingException e)
			{
				throw new RuntimeException(e);
			}
			catch (CrlException e)
			{
				throw new RuntimeException(e);
			}
			return unsignedAttrs;
		}
Example #15
 /// <exception cref="Kirikiri.Tjs2.VariantException"></exception>
 /// <exception cref="Kirikiri.Tjs2.TJSException"></exception>
 public static void SaveStructuredDataForObject(Dispatch2 dsp, AList<Dispatch2> stack
     , TextWriteStreamInterface stream, string indentstr)
 {
     // check object recursion
     int count = stack.Count;
     for (int i = 0; i < count; i++)
     {
         Dispatch2 d = stack[i];
         if (d == dsp)
         {
             // object recursion detected
             stream.Write("null /* object recursion detected */");
             return;
         }
     }
     // determine dsp's object type
     DictionaryNI dicni;
     ArrayNI arrayni;
     if (dsp != null)
     {
         dicni = (DictionaryNI)dsp.GetNativeInstance(DictionaryClass.ClassID);
         if (dicni != null)
         {
             // dictionary
             stack.AddItem(dsp);
             dicni.SaveStructuredData(stack, stream, indentstr);
             stack.Remove(stack.Count - 1);
             return;
         }
         else
         {
             arrayni = (ArrayNI)dsp.GetNativeInstance(ArrayClass.ClassID);
             if (arrayni != null)
             {
                 // array
                 stack.AddItem(dsp);
                 arrayni.SaveStructuredData(stack, stream, indentstr);
                 stack.Remove(stack.Count - 1);
                 return;
             }
             else
             {
                 // other objects
                 stream.Write("null /* (object) \"");
                 // stored as a null
                 Variant val = new Variant(dsp, dsp);
                 stream.Write(LexBase.EscapeC(val.AsString()));
                 stream.Write("\" */");
                 return;
             }
         }
     }
     stream.Write("null");
 }
Example #16
		public override IList<AdvancedSignature> GetSignatures()
		{
			IList<AdvancedSignature> infos = new AList<AdvancedSignature>();
			foreach (object o in this.cmsSignedData.GetSignerInfos().GetSigners())
			{
				SignerInformation i = (SignerInformation)o;
				CAdESSignature info = new CAdESSignature(this.cmsSignedData, i.SignerID);
				infos.AddItem(info);
			}
			return infos;
		}
 public override IList<X509Certificate> GetCertificates()
 {
     IList<X509Certificate> certificates = new AList<X509Certificate>();
     try
     {
         throw new System.NotImplementedException();
         //TODO jbonilla - validate the way we did it in Intisign
         //KeyStore keyStore = KeyStore.GetInstance(keyStoreType);
         //keyStore.Load(new FileInputStream(keyStoreFile), password.ToCharArray());
         //Enumeration<string> aliases = keyStore.Aliases();
         //while (aliases.MoveNext())
         //{
         //    string alias = aliases.Current;
         //    Sharpen.Certificate onecert = keyStore.GetCertificate(alias);
         //    LOG.Info("Alias " + alias + " Cert " + ((X509Certificate)onecert).SubjectDN);
         //    if (onecert != null)
         //    {
         //        certificates.AddItem((X509Certificate)onecert);
         //    }
         //    if (keyStore.GetCertificateChain(alias) != null)
         //    {
         //        foreach (Sharpen.Certificate cert in keyStore.GetCertificateChain(alias))
         //        {
         //            LOG.Info("Alias " + alias + " Cert " + ((X509Certificate)cert).SubjectDN);
         //            if (!certificates.Contains(cert))
         //            {
         //                certificates.AddItem((X509Certificate)cert);
         //            }
         //        }
         //    }
         //}
     }
     catch (CertificateException)
     {
         throw new EncodingException(EncodingException.MSG.CERTIFICATE_CANNOT_BE_READ);
     }
     /*catch (KeyStoreException)
     {
         throw new EncodingException(EncodingException.MSG.CERTIFICATE_CANNOT_BE_READ);
     }*/
     catch (NoSuchAlgorithmException)
     {
         throw new EncodingException(EncodingException.MSG.CERTIFICATE_CANNOT_BE_READ);
     }
     catch (FileNotFoundException)
     {
         throw new EncodingException(EncodingException.MSG.CERTIFICATE_CANNOT_BE_READ);
     }
     catch (IOException)
     {
         throw new EncodingException(EncodingException.MSG.CERTIFICATE_CANNOT_BE_READ);
     }
     return certificates;
 }
 public virtual void SetUp()
 {
     Com.Drew.Metadata.Metadata metadata = new Com.Drew.Metadata.Metadata();
     IList<sbyte[]> jpegSegments = new AList<sbyte[]>();
     jpegSegments.Add(FileUtil.ReadBytes("Tests/Data/withXmpAndIptc.jpg.app1.1"));
     new XmpReader().ReadJpegSegments(jpegSegments.AsIterable(), metadata, JpegSegmentType.App1);
     ICollection<XmpDirectory> xmpDirectories = metadata.GetDirectoriesOfType<XmpDirectory>();
     NUnit.Framework.Assert.IsNotNull(xmpDirectories);
     Sharpen.Tests.AreEqual(1, xmpDirectories.Count);
     _directory = xmpDirectories.Iterator().Next();
     Sharpen.Tests.IsFalse(_directory.HasErrors());
 }
 public static string[] GetAllMatches(string input)
 {
     Sharpen.Pattern p = Sharpen.Pattern.Compile("([0-9]*\\.[0-9]+|[0-9]+|[a-zA-Z]+|[^\\w\\s])"
         );
     Matcher m = p.Matcher(input);
     AList<string> matches = new AList<string>();
     while (m.Find())
     {
         matches.AddItem(m.Group());
     }
     string[] matchArr = new string[matches.Count];
     return Sharpen.Collections.ToArray(matches, matchArr);
 }
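A small usage sketch for the tokenizer above; the input string is made up purely for illustration:

 // Decimals, integers, words and punctuation come back as separate tokens.
 string[] tokens = GetAllMatches("3.5mm jack!");
 // tokens -> { "3.5", "mm", "jack", "!" }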
Example #20
 public static Query GetQueryById(Database database, string userId)
 {
     View view = database.GetView(ByIdViewName);
     if (view.GetMap() == null)
     {
         Mapper map = new _Mapper_52();
         view.SetMap(map, null);
     }
     Query query = view.CreateQuery();
     IList<object> keys = new AList<object>();
     keys.AddItem(userId);
     query.SetKeys(keys);
     return query;
 }
 /// <exception cref="System.Exception"></exception>
 public virtual void TestJsonArray()
 {
     IList<object> array = new AList<object>();
     array.AddItem("01234567890");
     array.AddItem("bar");
     array.AddItem(5);
     array.AddItem(3.5);
     array.AddItem(true);
     array.AddItem(new DateTime().ToString());
     ObjectWriter mapper = new ObjectWriter();
     byte[] json = mapper.WriteValueAsBytes(array);
     JsonDocument jsdoc = new JsonDocument(json);
     NUnit.Framework.Assert.AreEqual(array, jsdoc.JsonObject());
 }
 public IList<CertificateAndContext> GetCertificateBySubjectName(X509Name subjectName
     )
 {
     IList<CertificateAndContext> list = new AList<CertificateAndContext>();
     foreach (X509Certificate cert in GetCertificates())
     {
         if (subjectName.Equals(cert.SubjectDN))
         {
             CertificateAndContext cc = new CertificateAndContext(cert);
             cc.SetCertificateSource(sourceType);
             list.AddItem(cc);
         }
     }
     return list;
 }
Example #23
File: TJS.cs Project: fantasydr/krkr-cs
 /// <exception cref="Kirikiri.Tjs2.VariantException"></exception>
 /// <exception cref="Kirikiri.Tjs2.TJSException"></exception>
 public TJS()
 {
     // create script cache object
     mCache = new ScriptCache(this);
     mPPValues = new Dictionary<string, int>();
     SetPPValue("version", VERSION_HEX);
     SetPPValue("environment", ENV_JAVA_APPLICATION);
     // TODO: set an appropriate value
     SetPPValue("compatibleSystem", 1);
     // compatible system: true
     mGlobal = new CustomObject(GLOBAL_HASH_BITS);
     mScriptBlocks = new AList<WeakReference<ScriptBlock>>();
     Dispatch2 dsp;
     Variant val;
     // Array
     //dsp = new ArrayClass();
     dsp = mArrayClass;
     val = new Variant(dsp, null);
     mGlobal.PropSet(MEMBERENSURE, "Array", val, mGlobal);
     // Dictionary
     //dsp = new DictionaryClass();
     dsp = mDictionayClass;
     val = new Variant(dsp, null);
     mGlobal.PropSet(MEMBERENSURE, "Dictionary", val, mGlobal);
     // Date //TODO: add date back
     //dsp = new DateClass();
     //val = new Variant(dsp, null);
     //mGlobal.PropSet(MEMBERENSURE, "Date", val, mGlobal);
     {
         // Math
         Dispatch2 math;
         dsp = math = new MathClass();
         val = new Variant(dsp, null);
         mGlobal.PropSet(MEMBERENSURE, "Math", val, mGlobal);
         // Math.RandomGenerator
         dsp = new RandomGeneratorClass();
         val = new Variant(dsp, null);
         math.PropSet(MEMBERENSURE, "RandomGenerator", val, math);
     }
     // Exception
     dsp = new ExceptionClass();
     val = new Variant(dsp, null);
     mGlobal.PropSet(MEMBERENSURE, "Exception", val, mGlobal);
     // RegExp
     dsp = new RegExpClass();
     val = new Variant(dsp, null);
     mGlobal.PropSet(MEMBERENSURE, "RegExp", val, mGlobal);
 }
Example #24
		public override IList<BasicOcspResp> GetOCSPResponsesFromSignature()
		{
			IList<BasicOcspResp> list = new AList<BasicOcspResp>();
			// Add OCSP responses from the CAdES-XL revocation-values unsigned attribute inside SignerInfo, if present
			SignerInformation si = cmsSignedData.GetSignerInfos().GetFirstSigner(signerId);
			if (si != null && si.UnsignedAttributes != null && si.UnsignedAttributes[PkcsObjectIdentifiers.IdAAEtsRevocationValues] != null)
			{
				RevocationValues revValues = RevocationValues.GetInstance(si.UnsignedAttributes[PkcsObjectIdentifiers.IdAAEtsRevocationValues].AttrValues[0]);
				foreach (BasicOcspResponse ocspObj in revValues.GetOcspVals())
				{
					BasicOcspResp bOcspObj = new BasicOcspResp(ocspObj);
					list.AddItem(bOcspObj);
				}
			}
			return list;
		}
Example #25
 public Compiler(TJS owner)
 {
     mOwner = owner;
     // Initialization that would only set Java default values is omitted
     //mScript = null;
     //mName = null;
     //mInterCodeContext = null;
     //mTopLevelContext = null;
     //mLexicalAnalyzer = null;
     //mUsingPreProcessor = false;
     //mLineOffset = 0;
     //mCompileErrorCount = 0;
     //mNode = null;
     mGeneratorStack = new Stack<InterCodeGenerator>();
     mInterCodeGeneratorList = new AList<InterCodeGenerator>();
     mInterCodeObjectList = new AList<InterCodeObject>();
 }
		public static string RegisterAccessToken(string accessToken, string email, string
			 origin)
		{
			lock (typeof(FacebookAuthorizer))
			{
				IList<string> key = new AList<string>();
				key.AddItem(email);
				key.AddItem(origin);
				if (accessTokens == null)
				{
					accessTokens = new Dictionary<IList<string>, string>();
				}
				Log.D(Database.Tag, "FacebookAuthorizer registering key: " + key);
				accessTokens.Put(key, accessToken);
				return email;
			}
		}
Example #27
 public Lexer(Compiler block, string script, bool isexpr, bool resultneeded)
     : base()
 {
     InitReservedWordsHashTable();
     mRetValDeque = new Queue<TokenPair>();
     mEmbeddableExpressionDataStack = new AList<EmbeddableExpressionData>();
     mValues = new AList<object>();
     mBlock = block;
     mIsExprMode = isexpr;
     mResultNeeded = resultneeded;
     mPrevToken = -1;
     if (mIsExprMode)
     {
         mStream = new StringStream(script + ";");
     }
     else
     {
         if (script.StartsWith("#!") == true)
         {
             // Replace the leading "#!" with "//"
             mStream = new StringStream("//" + Sharpen.Runtime.Substring(script, 2));
         }
         else
         {
             mStream = new StringStream(script);
         }
     }
     if (CompileState.mEnableDicFuncQuickHack)
     {
         //----- dicfunc quick-hack
         mDicFunc = false;
         if (mIsExprMode && (script.StartsWith("[") == true || script.StartsWith("%[") ==
             true))
         {
             mDicFunc = true;
         }
     }
     //mIfLevel = 0;
     //mPrevPos = 0;
     //mNestLevel = 0;
     mIsFirst = true;
     //mRegularExpression = false;
     //mBareWord = false;
     PutValue(null);
 }
Example #28
 public ConstArrayData()
 {
     // Hash maps used to check whether a value has already been stored
     // temporary
     mByte = new AList<byte>();
     mShort = new AList<short>();
     mInteger = new AList<int>();
     mLong = new AList<long>();
     mDouble = new AList<double>();
     mString = new AList<string>();
     mByteBuffer = new AList<ByteBuffer>();
     mByteHash = new Dictionary<byte, int>();
     mShortHash = new Dictionary<short, int>();
     mIntegerHash = new Dictionary<int, int>();
     mLongHash = new Dictionary<long, int>();
     mDoubleHash = new Dictionary<double, int>();
     mStringHash = new Dictionary<string, int>();
     mByteBufferHash = new Dictionary<ByteBuffer, int>();
 }
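The paired AList/Dictionary fields above follow the usual value-interning pattern: each list stores every distinct constant once, and the matching dictionary maps a value back to its index. A minimal sketch of such a helper against the fields above (PutLong itself is hypothetical, not part of the original class):

 public int PutLong(long value)
 {
     int index;
     if (mLongHash.TryGetValue(value, out index))
     {
         return index;             // value already stored, reuse its slot
     }
     mLong.Add(value);             // AList<T> behaves like List<T> here
     index = mLong.Count - 1;
     mLongHash[value] = index;     // remember the slot for next time
     return index;
 }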
Example #29
		/// <exception cref="System.IO.IOException"></exception>
		public virtual Document ExtendSignatures(Document document, Document originalData
			, SignatureParameters parameters)
		{
			try
			{
				CmsSignedData signedData = new CmsSignedData(document.OpenStream());
				SignerInformationStore signerStore = signedData.GetSignerInfos();
				AList<SignerInformation> siArray = new AList<SignerInformation>();				

                foreach (SignerInformation si in signerStore.GetSigners())
                {                    
                    try
                    {
                        //jbonilla - Hack to avoid errors when a signature has already been extended.
                        //It is assumed that signatures are only extended from BES.
                        //TODO jbonilla - Should check how far the signature was extended (BES, T, C, X, XL).
                        if(si.UnsignedAttributes.Count == 0)
                        {
                            siArray.AddItem(ExtendCMSSignature(signedData, si, parameters, originalData));
                        }
                        else
                        {
                            LOG.Error("Already extended?");
                            siArray.AddItem(si);
                        }                        
                    }
                    catch (IOException)
                    {
                        LOG.Error("Exception when extending signature");
                        siArray.AddItem(si);
                    }
                }
				
				SignerInformationStore newSignerStore = new SignerInformationStore(siArray);
				CmsSignedData extended = CmsSignedData.ReplaceSigners(signedData, newSignerStore);
				return new InMemoryDocument(extended.GetEncoded());
			}
			catch (CmsException)
			{
				throw new IOException("Cannot parse CMS data");
			}
		}
Example #30
		public override IList<Couchbase.Lite.SavedRevision> GetRevisionHistory()
		{
			IList<Couchbase.Lite.SavedRevision> revisions = new AList<Couchbase.Lite.SavedRevision
				>();
			IList<RevisionInternal> internalRevisions = GetDatabase().GetRevisionHistory(revisionInternal
				);
			foreach (RevisionInternal internalRevision in internalRevisions)
			{
				if (internalRevision.GetRevId().Equals(GetId()))
				{
					revisions.AddItem(this);
				}
				else
				{
					Couchbase.Lite.SavedRevision revision = document.GetRevisionFromRev(internalRevision
						);
					revisions.AddItem(revision);
				}
			}
			Sharpen.Collections.Reverse(revisions);
			return Sharpen.Collections.UnmodifiableList(revisions);
		}
Example #31
        internal override void ProcessInbox(RevisionList inbox)
        {
            var lastInboxSequence = inbox[inbox.Count - 1].GetSequence();
            // Generate a set of doc/rev IDs in the JSON format that _revs_diff wants:
            // <http://wiki.apache.org/couchdb/HttpPostRevsDiff>
            var diffs = new Dictionary <String, IList <String> >();

            foreach (var rev in inbox)
            {
                var docID = rev.GetDocId();
                var revs  = diffs.Get(docID);
                if (revs == null)
                {
                    revs         = new AList <String>();
                    diffs[docID] = revs;
                }
                revs.AddItem(rev.GetRevId());
            }

            // Call _revs_diff on the target db:
            Log.D(Tag, this + "|" + Thread.CurrentThread() + ": processInbox() calling asyncTaskStarted()");
            Log.D(Tag, this + "|" + Thread.CurrentThread() + ": posting to /_revs_diff: " + diffs);

            AsyncTaskStarted();
            SendAsyncRequest(HttpMethod.Post, "/_revs_diff", diffs, (response, e) =>
            {
                try {
                    Log.D(Tag, this + "|" + Thread.CurrentThread() + ": /_revs_diff response: " + response);

                    var responseData = (JObject)response;
                    var results      = responseData.ToObject <IDictionary <string, object> >();

                    if (e != null)
                    {
                        LastError = e;
                        RevisionFailed();
                        //Stop ();
                    }
                    else
                    {
                        if (results.Count != 0)
                        {
                            // Go through the list of local changes again, selecting the ones the destination server
                            // said were missing and mapping them to a JSON dictionary in the form _bulk_docs wants:
                            var docsToSend = new AList <object> ();

                            foreach (var rev in inbox)
                            {
                                IDictionary <string, object> properties = null;
                                var resultDocData = (JObject)results.Get(rev.GetDocId());
                                var resultDoc     = resultDocData.ToObject <IDictionary <String, Object> >();
                                if (resultDoc != null)
                                {
                                    var revs = ((JArray)resultDoc.Get("missing")).Values <String>().ToList();
                                    if (revs != null && revs.Contains(rev.GetRevId()))
                                    {
                                        //remote server needs this revision
                                        // Get the revision's properties
                                        if (rev.IsDeleted())
                                        {
                                            properties = new Dictionary <string, object> ();
                                            properties.Put("_id", rev.GetDocId());
                                            properties.Put("_rev", rev.GetRevId());
                                            properties.Put("_deleted", true);
                                        }
                                        else
                                        {
                                            // OPT: Shouldn't include all attachment bodies, just ones that have changed
                                            var contentOptions = EnumSet.Of(TDContentOptions.TDIncludeAttachments, TDContentOptions.TDBigAttachmentsFollow);
                                            try {
                                                LocalDatabase.LoadRevisionBody(rev, contentOptions);
                                            } catch (CouchbaseLiteException e1) {
                                                Log.W(Tag, string.Format("%s Couldn't get local contents of %s", rev, this));
                                                RevisionFailed();
                                                continue;
                                            }
                                            properties = new Dictionary <String, Object> (rev.GetProperties());
                                        }
                                        if (properties.ContainsKey("_attachments"))
                                        {
                                            if (UploadMultipartRevision(rev))
                                            {
                                                continue;
                                            }
                                        }
                                        if (properties != null)
                                        {
                                            // Add the _revisions list:
                                            properties.Put("_revisions", LocalDatabase.GetRevisionHistoryDict(rev));
                                            //now add it to the docs to send
                                            docsToSend.AddItem(properties);
                                        }
                                    }
                                }
                            }
                            // Post the revisions to the destination. "new_edits":false means that the server should
                            // use the given _rev IDs instead of making up new ones.
                            var numDocsToSend = docsToSend.Count;
                            if (numDocsToSend > 0)
                            {
                                var bulkDocsBody = new Dictionary <String, Object> ();
                                bulkDocsBody.Put("docs", docsToSend);
                                bulkDocsBody.Put("new_edits", false);

                                Log.V(Tag, string.Format("%s: POSTing " + numDocsToSend + " revisions to _bulk_docs: %s", this, docsToSend));

                                ChangesCount += numDocsToSend;

                                Log.D(Tag, this + "|" + Thread.CurrentThread() + ": processInbox-before_bulk_docs() calling asyncTaskStarted()");

                                AsyncTaskStarted();
                                SendAsyncRequest(HttpMethod.Post, "/_bulk_docs", bulkDocsBody, (result, ex) => {
                                    try
                                    {
                                        if (ex != null)
                                        {
                                            LastError = ex;
                                            RevisionFailed();
                                        }
                                        else
                                        {
                                            Log.V(Tag, string.Format("%s: POSTed to _bulk_docs: %s", this, docsToSend));
                                            LastSequence = string.Format("{0}", lastInboxSequence);
                                        }
                                        CompletedChangesCount += numDocsToSend;
                                    }
                                    finally
                                    {
                                        AsyncTaskFinished(1);
                                    }
                                });
                            }
                        }
                        else
                        {
                            // If none of the revisions are new to the remote, just bump the lastSequence:
                            LastSequence = string.Format("{0}", lastInboxSequence);
                        }
                    }
                }
                finally
                {
                    Log.D(Tag, this + "|" + Thread.CurrentThread() + ": processInbox() calling asyncTaskFinished()");
                    AsyncTaskFinished(1);
                }
            });
        }
Example #32
 /// <param name="comparatorData">the comparatorData to set</param>
 public virtual void SetComparatorData(AList <ComparatorData> comparatorData)
 {
     this.comparatorData = comparatorData;
 }
        public void TestViewGroupedStrings()
        {
            IDictionary <string, object> docProperties1 = new Dictionary <string, object>();

            docProperties1["name"] = "Alice";
            PutDoc(database, docProperties1);
            IDictionary <string, object> docProperties2 = new Dictionary <string, object>();

            docProperties2["name"] = "Albert";
            PutDoc(database, docProperties2);
            IDictionary <string, object> docProperties3 = new Dictionary <string, object>();

            docProperties3["name"] = "Naomi";
            PutDoc(database, docProperties3);
            IDictionary <string, object> docProperties4 = new Dictionary <string, object>();

            docProperties4["name"] = "Jens";
            PutDoc(database, docProperties4);
            IDictionary <string, object> docProperties5 = new Dictionary <string, object>();

            docProperties5["name"] = "Jed";
            PutDoc(database, docProperties5);

            View view = database.GetView("default/names");

            view.SetMapReduce((document, emitter) =>
            {
                string name = (string)document["name"];
                if (name != null)
                {
                    emitter(Sharpen.Runtime.Substring(name, 0, 1), 1);
                }
            }, (keys, values, rereduce) => View.TotalValues(values.ToList()), "1.0");

            view.UpdateIndex();
            QueryOptions options = new QueryOptions();

            options.SetGroupLevel(1);

            IList <QueryRow> rows = view.QueryWithOptions(options).ToList();
            IList <IDictionary <string, object> > expectedRows = new AList <IDictionary <string, object> >();
            IDictionary <string, object>          row1         = new Dictionary <string, object>();

            row1["key"]   = "A";
            row1["value"] = 2;
            expectedRows.AddItem(row1);

            IDictionary <string, object> row2 = new Dictionary <string, object>();

            row2["key"]   = "J";
            row2["value"] = 2;
            expectedRows.AddItem(row2);

            IDictionary <string, object> row3 = new Dictionary <string, object>();

            row3["key"]   = "N";
            row3["value"] = 1;
            expectedRows.AddItem(row3);

            Assert.AreEqual(row1["key"], rows[0].Key);
            Assert.AreEqual(row1["value"], rows[0].Value);
            Assert.AreEqual(row2["key"], rows[1].Key);
            Assert.AreEqual(row2["value"], rows[1].Value);
            Assert.AreEqual(row3["key"], rows[2].Key);
            Assert.AreEqual(row3["value"], rows[2].Value);
        }
Example #34
            private void KillBasedOnProgress(bool considerMaps)
            {
                bool fail = false;

                if (considerMaps)
                {
                    ReliabilityTest.Log.Info("Will kill tasks based on Maps' progress");
                }
                else
                {
                    ReliabilityTest.Log.Info("Will kill tasks based on Reduces' progress");
                }
                ReliabilityTest.Log.Info("Initial progress threshold: " + this.threshold + ". Threshold Multiplier: "
                                         + this.thresholdMultiplier + ". Number of iterations: " + this.numIterations);
                float thresholdVal      = this.threshold;
                int   numIterationsDone = 0;

                while (!this.killed)
                {
                    try
                    {
                        float progress;
                        if (this.jc.GetJob(this.rJob.GetID()).IsComplete() || numIterationsDone == this.numIterations)
                        {
                            break;
                        }
                        if (considerMaps)
                        {
                            progress = this.jc.GetJob(this.rJob.GetID()).MapProgress();
                        }
                        else
                        {
                            progress = this.jc.GetJob(this.rJob.GetID()).ReduceProgress();
                        }
                        if (progress >= thresholdVal)
                        {
                            numIterationsDone++;
                            if (numIterationsDone > 0 && numIterationsDone % 2 == 0)
                            {
                                fail = true;
                            }
                            //fail tasks instead of kill
                            ClusterStatus c = this.jc.GetClusterStatus();
                            ReliabilityTest.Log.Info(new DateTime() + " Killing a few tasks");
                            ICollection <TaskAttemptID> runningTasks = new AList <TaskAttemptID>();
                            TaskReport[] mapReports = this.jc.GetMapTaskReports(this.rJob.GetID());
                            foreach (TaskReport mapReport in mapReports)
                            {
                                if (mapReport.GetCurrentStatus() == TIPStatus.Running)
                                {
                                    Sharpen.Collections.AddAll(runningTasks, mapReport.GetRunningTaskAttempts());
                                }
                            }
                            if (runningTasks.Count > c.GetTaskTrackers() / 2)
                            {
                                int count = 0;
                                foreach (TaskAttemptID t in runningTasks)
                                {
                                    ReliabilityTest.Log.Info(new DateTime() + " Killed task : " + t);
                                    this.rJob.KillTask(t, fail);
                                    if (count++ > runningTasks.Count / 2)
                                    {
                                        //kill 50%
                                        break;
                                    }
                                }
                            }
                            runningTasks.Clear();
                            TaskReport[] reduceReports = this.jc.GetReduceTaskReports(this.rJob.GetID());
                            foreach (TaskReport reduceReport in reduceReports)
                            {
                                if (reduceReport.GetCurrentStatus() == TIPStatus.Running)
                                {
                                    Sharpen.Collections.AddAll(runningTasks, reduceReport.GetRunningTaskAttempts());
                                }
                            }
                            if (runningTasks.Count > c.GetTaskTrackers() / 2)
                            {
                                int count = 0;
                                foreach (TaskAttemptID t in runningTasks)
                                {
                                    ReliabilityTest.Log.Info(new DateTime() + " Killed task : " + t);
                                    this.rJob.KillTask(t, fail);
                                    if (count++ > runningTasks.Count / 2)
                                    {
                                        //kill 50%
                                        break;
                                    }
                                }
                            }
                            thresholdVal = thresholdVal * this.thresholdMultiplier;
                        }
                        Sharpen.Thread.Sleep(5000);
                    }
                    catch (System.Threading.ThreadInterruptedException)
                    {
                        // Interrupted while sleeping: stop the kill loop.
                        // (A second catch of the same Exception type would not compile.)
                        this.killed = true;
                    }
                    catch (Exception e)
                    {
                        ReliabilityTest.Log.Fatal(StringUtils.StringifyException(e));
                    }
                }
            }
Example #35
        /// <summary>Fetches the contents of a revision from the remote db, including its parent revision ID.
        ///     </summary>
        /// <remarks>
        /// Fetches the contents of a revision from the remote db, including its parent revision ID.
        /// The contents are stored into rev.properties.
        /// </remarks>
        internal void PullRemoteRevision(RevisionInternal rev)
        {
            Log.D(Tag, this + "|" + Thread.CurrentThread() + ": pullRemoteRevision with rev: " + rev);
            Log.D(Tag, this + "|" + Thread.CurrentThread() + ": pullRemoteRevision() calling asyncTaskStarted()");

            AsyncTaskStarted();

            httpConnectionCount++;

            // Construct a query. We want the revision history, and the bodies of attachments that have
            // been added since the latest revisions we have locally.
            // See: http://wiki.apache.org/couchdb/HTTP_Document_API#Getting_Attachments_With_a_Document
            var path      = new StringBuilder("/" + HttpUtility.UrlEncode(rev.GetDocId()) + "?rev=" + HttpUtility.UrlEncode(rev.GetRevId()) + "&revs=true&attachments=true");
            var knownRevs = KnownCurrentRevIDs(rev);

            if (knownRevs == null)
            {
                //this means something is wrong, possibly the replicator has shut down
                Log.D(Tag, this + "|" + Thread.CurrentThread() + ": pullRemoteRevision() calling asyncTaskFinished()");
                AsyncTaskFinished(1);
                httpConnectionCount--;
                return;
            }

            if (knownRevs.Count > 0)
            {
                path.Append("&atts_since=");
                path.Append(JoinQuotedEscaped(knownRevs));
            }

            //create a final version of this variable for the log statement inside
            //FIXME find a way to avoid this
            var pathInside = path.ToString();

            SendAsyncMultipartDownloaderRequest(HttpMethod.Get, pathInside, null, LocalDatabase, (result, e) =>
            {
                try
                {
                    // OK, now we've got the response revision:
                    Log.D(Tag, this + ": pullRemoteRevision got response for rev: " + rev);
                    if (result != null)
                    {
                        var properties = ((JObject)result).ToObject <IDictionary <string, object> >();
                        var history    = Database.ParseCouchDBRevisionHistory(properties);

                        if (history != null)
                        {
                            rev.SetProperties(properties);
                            // Add to batcher ... eventually it will be fed to -insertRevisions:.
                            var toInsert = new AList <object> ();
                            toInsert.AddItem(rev);
                            toInsert.AddItem(history);
                            Log.D(Tag, this + ": pullRemoteRevision add rev: " + rev + " to batcher");
                            downloadsToInsert.QueueObject(toInsert);
                            Log.D(Tag, this + "|" + Thread.CurrentThread() + ": pullRemoteRevision.onCompletion() calling asyncTaskStarted()");
                            AsyncTaskStarted();
                        }
                        else
                        {
                            Log.W(Tag, this + ": Missing revision history in response from " + pathInside);
                            CompletedChangesCount += 1;
                        }
                    }
                    else
                    {
                        if (e != null)
                        {
                            Log.E(Tag, "Error pulling remote revision", e);
                            LastError = e;
                        }
                        CompletedChangesCount += 1;
                    }
                }
                finally
                {
                    Log.D(Tag, this + "|" + Thread.CurrentThread() + ": pullRemoteRevision.onCompletion() calling asyncTaskFinished()");
                    AsyncTaskFinished(1);
                }

                // Note that we've finished this task; then start another one if there
                // are still revisions waiting to be pulled:
                --httpConnectionCount;
                PullRemoteRevisions();
            });
        }
Example #36
            /// <exception cref="System.IO.IOException"/>
            internal virtual void GenCode(FileWriter hh, FileWriter cc, AList <string> options
                                          )
            {
                CodeBuffer hb = new CodeBuffer();

                string[] ns = this.module.Split("::");
                for (int i = 0; i < ns.Length; i++)
                {
                    hb.Append("namespace " + ns[i] + " {\n");
                }
                hb.Append("class " + this.name + " : public ::hadoop::Record {\n");
                hb.Append("private:\n");
                for (IEnumerator <JField <JType.CppType> > i_1 = this.fields.GetEnumerator(); i_1.HasNext
                         ();)
                {
                    JField <JType.CppType> jf = i_1.Next();
                    string        name        = jf.GetName();
                    JType.CppType type        = jf.GetType();
                    type.GenDecl(hb, name);
                }
                // type info vars
                hb.Append("static ::hadoop::RecordTypeInfo* p" + Consts.RtiVar + ";\n");
                hb.Append("static ::hadoop::RecordTypeInfo* p" + Consts.RtiFilter + ";\n");
                hb.Append("static int* p" + Consts.RtiFilterFields + ";\n");
                hb.Append("static ::hadoop::RecordTypeInfo* setupTypeInfo();\n");
                hb.Append("static void setupRtiFields();\n");
                hb.Append("virtual void deserializeWithoutFilter(::hadoop::IArchive& " + Consts.RecordInput
                          + ", const char* " + Consts.Tag + ");\n");
                hb.Append("public:\n");
                hb.Append("static const ::hadoop::RecordTypeInfo& getTypeInfo() " + "{return *p"
                          + Consts.RtiVar + ";}\n");
                hb.Append("static void setTypeFilter(const ::hadoop::RecordTypeInfo& rti);\n");
                hb.Append("static void setTypeFilter(const ::hadoop::RecordTypeInfo* prti);\n");
                hb.Append("virtual void serialize(::hadoop::OArchive& " + Consts.RecordOutput + ", const char* "
                          + Consts.Tag + ") const;\n");
                hb.Append("virtual void deserialize(::hadoop::IArchive& " + Consts.RecordInput +
                          ", const char* " + Consts.Tag + ");\n");
                hb.Append("virtual const ::std::string& type() const;\n");
                hb.Append("virtual const ::std::string& signature() const;\n");
                hb.Append("virtual bool operator<(const " + this.name + "& peer_) const;\n");
                hb.Append("virtual bool operator==(const " + this.name + "& peer_) const;\n");
                hb.Append("virtual ~" + this.name + "() {};\n");
                for (IEnumerator <JField <JType.CppType> > i_2 = this.fields.GetEnumerator(); i_2.HasNext
                         ();)
                {
                    JField <JType.CppType> jf = i_2.Next();
                    string        name        = jf.GetName();
                    JType.CppType type        = jf.GetType();
                    type.GenGetSet(hb, name);
                }
                hb.Append("}; // end record " + this.name + "\n");
                for (int i_3 = ns.Length - 1; i_3 >= 0; i_3--)
                {
                    hb.Append("} // end namespace " + ns[i_3] + "\n");
                }
                hh.Write(hb.ToString());
                CodeBuffer cb = new CodeBuffer();

                // initialize type info vars
                cb.Append("::hadoop::RecordTypeInfo* " + this.fullName + "::p" + Consts.RtiVar +
                          " = " + this.fullName + "::setupTypeInfo();\n");
                cb.Append("::hadoop::RecordTypeInfo* " + this.fullName + "::p" + Consts.RtiFilter
                          + " = NULL;\n");
                cb.Append("int* " + this.fullName + "::p" + Consts.RtiFilterFields + " = NULL;\n\n"
                          );
                // setupTypeInfo()
                cb.Append("::hadoop::RecordTypeInfo* " + this.fullName + "::setupTypeInfo() {\n");
                cb.Append("::hadoop::RecordTypeInfo* p = new ::hadoop::RecordTypeInfo(\"" + this.
                          name + "\");\n");
                for (IEnumerator <JField <JType.CppType> > i_4 = this.fields.GetEnumerator(); i_4.HasNext
                         ();)
                {
                    JField <JType.CppType> jf = i_4.Next();
                    string        name        = jf.GetName();
                    JType.CppType type        = jf.GetType();
                    type.GenStaticTypeInfo(cb, name);
                }
                cb.Append("return p;\n");
                cb.Append("}\n");
                // setTypeFilter()
                cb.Append("void " + this.fullName + "::setTypeFilter(const " + "::hadoop::RecordTypeInfo& rti) {\n"
                          );
                cb.Append("if (NULL != p" + Consts.RtiFilter + ") {\n");
                cb.Append("delete p" + Consts.RtiFilter + ";\n");
                cb.Append("}\n");
                cb.Append("p" + Consts.RtiFilter + " = new ::hadoop::RecordTypeInfo(rti);\n");
                cb.Append("if (NULL != p" + Consts.RtiFilterFields + ") {\n");
                cb.Append("delete p" + Consts.RtiFilterFields + ";\n");
                cb.Append("}\n");
                cb.Append("p" + Consts.RtiFilterFields + " = NULL;\n");
                // set RTIFilter for nested structs. We may end up with multiple lines that
                // do the same thing, if the same struct is nested in more than one field,
                // but that's OK.
                for (IEnumerator <JField <JType.CppType> > i_5 = this.fields.GetEnumerator(); i_5.HasNext
                         ();)
                {
                    JField <JType.CppType> jf   = i_5.Next();
                    JType.CppType          type = jf.GetType();
                    type.GenSetRTIFilter(cb);
                }
                cb.Append("}\n");
                // setTypeFilter()
                cb.Append("void " + this.fullName + "::setTypeFilter(const " + "::hadoop::RecordTypeInfo* prti) {\n"
                          );
                cb.Append("if (NULL != prti) {\n");
                cb.Append("setTypeFilter(*prti);\n");
                cb.Append("}\n");
                cb.Append("}\n");
                // setupRtiFields()
                this.GenSetupRTIFields(cb);
                // serialize()
                cb.Append("void " + this.fullName + "::serialize(::hadoop::OArchive& " + Consts.RecordOutput
                          + ", const char* " + Consts.Tag + ") const {\n");
                cb.Append(Consts.RecordOutput + ".startRecord(*this," + Consts.Tag + ");\n");
                for (IEnumerator <JField <JType.CppType> > i_6 = this.fields.GetEnumerator(); i_6.HasNext
                         ();)
                {
                    JField <JType.CppType> jf = i_6.Next();
                    string        name        = jf.GetName();
                    JType.CppType type        = jf.GetType();
                    if (type is JBuffer.CppBuffer)
                    {
                        cb.Append(Consts.RecordOutput + ".serialize(" + name + "," + name + ".length(),\""
                                  + name + "\");\n");
                    }
                    else
                    {
                        cb.Append(Consts.RecordOutput + ".serialize(" + name + ",\"" + name + "\");\n");
                    }
                }
                cb.Append(Consts.RecordOutput + ".endRecord(*this," + Consts.Tag + ");\n");
                cb.Append("return;\n");
                cb.Append("}\n");
                // deserializeWithoutFilter()
                cb.Append("void " + this.fullName + "::deserializeWithoutFilter(::hadoop::IArchive& "
                          + Consts.RecordInput + ", const char* " + Consts.Tag + ") {\n");
                cb.Append(Consts.RecordInput + ".startRecord(*this," + Consts.Tag + ");\n");
                for (IEnumerator <JField <JType.CppType> > i_7 = this.fields.GetEnumerator(); i_7.HasNext
                         ();)
                {
                    JField <JType.CppType> jf = i_7.Next();
                    string        name        = jf.GetName();
                    JType.CppType type        = jf.GetType();
                    if (type is JBuffer.CppBuffer)
                    {
                        cb.Append("{\nsize_t len=0; " + Consts.RecordInput + ".deserialize(" + name + ",len,\""
                                  + name + "\");\n}\n");
                    }
                    else
                    {
                        cb.Append(Consts.RecordInput + ".deserialize(" + name + ",\"" + name + "\");\n");
                    }
                }
                cb.Append(Consts.RecordInput + ".endRecord(*this," + Consts.Tag + ");\n");
                cb.Append("return;\n");
                cb.Append("}\n");
                // deserialize()
                cb.Append("void " + this.fullName + "::deserialize(::hadoop::IArchive& " + Consts
                          .RecordInput + ", const char* " + Consts.Tag + ") {\n");
                cb.Append("if (NULL == p" + Consts.RtiFilter + ") {\n");
                cb.Append("deserializeWithoutFilter(" + Consts.RecordInput + ", " + Consts.Tag +
                          ");\n");
                cb.Append("return;\n");
                cb.Append("}\n");
                cb.Append("// if we're here, we need to read based on version info\n");
                cb.Append(Consts.RecordInput + ".startRecord(*this," + Consts.Tag + ");\n");
                cb.Append("setupRtiFields();\n");
                cb.Append("for (unsigned int " + Consts.RioPrefix + "i=0; " + Consts.RioPrefix +
                          "i<p" + Consts.RtiFilter + "->getFieldTypeInfos().size(); " + Consts.RioPrefix +
                          "i++) {\n");
                int ct = 0;

                for (IEnumerator <JField <JType.CppType> > i_8 = this.fields.GetEnumerator(); i_8.HasNext
                         ();)
                {
                    JField <JType.CppType> jf = i_8.Next();
                    string        name        = jf.GetName();
                    JType.CppType type        = jf.GetType();
                    ct++;
                    if (1 != ct)
                    {
                        cb.Append("else ");
                    }
                    cb.Append("if (" + ct + " == p" + Consts.RtiFilterFields + "[" + Consts.RioPrefix
                              + "i]) {\n");
                    if (type is JBuffer.CppBuffer)
                    {
                        cb.Append("{\nsize_t len=0; " + Consts.RecordInput + ".deserialize(" + name + ",len,\""
                                  + name + "\");\n}\n");
                    }
                    else
                    {
                        cb.Append(Consts.RecordInput + ".deserialize(" + name + ",\"" + name + "\");\n");
                    }
                    cb.Append("}\n");
                }
                if (0 != ct)
                {
                    cb.Append("else {\n");
                    cb.Append("const std::vector< ::hadoop::FieldTypeInfo* >& typeInfos = p" + Consts
                              .RtiFilter + "->getFieldTypeInfos();\n");
                    cb.Append("::hadoop::Utils::skip(" + Consts.RecordInput + ", typeInfos[" + Consts
                              .RioPrefix + "i]->getFieldID()->c_str()" + ", *(typeInfos[" + Consts.RioPrefix +
                              "i]->getTypeID()));\n");
                    cb.Append("}\n");
                }
                cb.Append("}\n");
                cb.Append(Consts.RecordInput + ".endRecord(*this, " + Consts.Tag + ");\n");
                cb.Append("}\n");
                // operator <
                cb.Append("bool " + this.fullName + "::operator< (const " + this.fullName + "& peer_) const {\n"
                          );
                cb.Append("return (1\n");
                for (IEnumerator <JField <JType.CppType> > i_9 = this.fields.GetEnumerator(); i_9.HasNext
                         ();)
                {
                    JField <JType.CppType> jf = i_9.Next();
                    string name = jf.GetName();
                    cb.Append("&& (" + name + " < peer_." + name + ")\n");
                }
                cb.Append(");\n");
                cb.Append("}\n");
                cb.Append("bool " + this.fullName + "::operator== (const " + this.fullName + "& peer_) const {\n"
                          );
                cb.Append("return (1\n");
                for (IEnumerator <JField <JType.CppType> > i_10 = this.fields.GetEnumerator(); i_10.
                     HasNext();)
                {
                    JField <JType.CppType> jf = i_10.Next();
                    string name = jf.GetName();
                    cb.Append("&& (" + name + " == peer_." + name + ")\n");
                }
                cb.Append(");\n");
                cb.Append("}\n");
                cb.Append("const ::std::string&" + this.fullName + "::type() const {\n");
                cb.Append("static const ::std::string type_(\"" + this.name + "\");\n");
                cb.Append("return type_;\n");
                cb.Append("}\n");
                cb.Append("const ::std::string&" + this.fullName + "::signature() const {\n");
                cb.Append("static const ::std::string sig_(\"" + this._enclosing.GetSignature() +
                          "\");\n");
                cb.Append("return sig_;\n");
                cb.Append("}\n");
                cc.Write(cb.ToString());
            }
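
The deserialize() body emitted above walks the writer's field list and, for every position, either reads the matching member or skips the unknown field using the writer's type info. A minimal hand-written C# sketch of that filtered-read pattern (the delegate arrays and the 1-based index convention are illustrative assumptions, not the Hadoop record API):

static void DeserializeFiltered(int[] filterFields, System.Action[] readField, System.Action skipField)
{
    // filterFields[i] maps the writer's i-th field to a 1-based index of a member
    // in this record; any value outside that range means the field is unknown here.
    for (int i = 0; i < filterFields.Length; i++)
    {
        int idx = filterFields[i];
        if (idx >= 1 && idx <= readField.Length)
        {
            readField[idx - 1]();   // known field: read it into the matching member
        }
        else
        {
            skipField();            // unknown field: skip it using the writer's type info
        }
    }
}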
Example #37
0
        /// <exception cref="TjsException"></exception>
        /// <exception cref="VariantException"></exception>
        public virtual void AssignStructure(Dispatch2 dsp, AList <Dispatch2> stack)
        {
            // assign structured data from dsp
            ArrayNI arrayni = (ArrayNI)dsp.GetNativeInstance(ArrayClass.ClassID);

            if (arrayni != null)
            {
                // copy from array
                stack.AddItem(dsp);
                try
                {
                    mItems.Clear();
                    int count = arrayni.mItems.Count;
                    for (int i = 0; i < count; i++)
                    {
                        Variant v = arrayni.mItems[i];
                        if (v.IsObject())
                        {
                            // object
                            Dispatch2 dsp1 = v.AsObject();
                            // determine dsp's object type
                            //DictionaryNI dicni = null;
                            //ArrayNI arrayni1 = null;
                            if (dsp1 != null && dsp1.GetNativeInstance(DictionaryClass.ClassID) != null)
                            {
                                //dicni = (DictionaryNI)ni.mValue;
                                // dictionary
                                bool objrec = false;
                                int  scount = stack.Count;
                                for (int j = 0; j < scount; j++)
                                {
                                    Dispatch2 d = stack[j];
                                    if (d == dsp1)
                                    {
                                        // object recursion detected
                                        objrec = true;
                                        break;
                                    }
                                }
                                if (objrec)
                                {
                                    // becomes null
                                    mItems.AddItem(new Variant());
                                }
                                else
                                {
                                    Dispatch2 newobj = Tjs.CreateDictionaryObject();
                                    mItems.AddItem(new Variant(newobj, newobj));
                                    DictionaryNI newni;
                                    if ((newni = (DictionaryNI)newobj.GetNativeInstance(DictionaryClass.ClassID)) !=
                                        null)
                                    {
                                        newni.AssignStructure(dsp1, stack);
                                    }
                                }
                            }
                            else
                            {
                                if (dsp1 != null && dsp1.GetNativeInstance(ArrayClass.ClassID) != null)
                                {
                                    // array
                                    bool objrec = false;
                                    int  scount = stack.Count;
                                    for (int j = 0; j < scount; j++)
                                    {
                                        Dispatch2 d = stack[j];
                                        if (d == dsp1)
                                        {
                                            // object recursion detected
                                            objrec = true;
                                            break;
                                        }
                                    }
                                    if (objrec)
                                    {
                                        // becomes null
                                        mItems.AddItem(new Variant());
                                    }
                                    else
                                    {
                                        Dispatch2 newobj = Tjs.CreateArrayObject();
                                        mItems.AddItem(new Variant(newobj, newobj));
                                        ArrayNI newni;
                                        if ((newni = (ArrayNI)newobj.GetNativeInstance(ArrayClass.ClassID)) != null)
                                        {
                                            newni.AssignStructure(dsp1, stack);
                                        }
                                    }
                                }
                                else
                                {
                                    // other object types
                                    mItems.AddItem(v);
                                }
                            }
                        }
                        else
                        {
                            // others
                            mItems.AddItem(v);
                        }
                    }
                }
                finally
                {
                    stack.Remove(stack.Count - 1);
                }
            }
            else
            {
                throw new TjsException(Error.SpecifyDicOrArray);
            }
        }
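
The method above guards against cyclic data: every container currently being copied sits on the stack argument, and when the same reference shows up again the element becomes a null (empty) Variant instead of recursing forever. A self-contained C# sketch of the same guard, using plain List<object> instead of the TJS2 dispatch types (all names here are illustrative):

static object DeepCopy(object value, System.Collections.Generic.List<object> stack)
{
    var list = value as System.Collections.Generic.List<object>;
    if (list == null)
    {
        return value;  // plain values are kept as-is
    }
    if (stack.Contains(list))
    {
        return null;   // object recursion detected: becomes null, like the code above
    }
    stack.Add(list);
    try
    {
        var copy = new System.Collections.Generic.List<object>();
        foreach (object item in list)
        {
            copy.Add(DeepCopy(item, stack));
        }
        return copy;
    }
    finally
    {
        stack.RemoveAt(stack.Count - 1);
    }
}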
Example #38
0
        /// <exception cref="System.IO.IOException"/>
        private RawKeyValueIterator FinalMerge(JobConf job, FileSystem fs, IList <InMemoryMapOutput
                                                                                  <K, V> > inMemoryMapOutputs, IList <MergeManagerImpl.CompressAwarePath> onDiskMapOutputs
                                               )
        {
            Log.Info("finalMerge called with " + inMemoryMapOutputs.Count + " in-memory map-outputs and "
                     + onDiskMapOutputs.Count + " on-disk map-outputs");
            long maxInMemReduce = GetMaxInMemReduceLimit();
            // merge config params
            Type keyClass   = (Type)job.GetMapOutputKeyClass();
            Type valueClass = (Type)job.GetMapOutputValueClass();
            bool keepInputs = job.GetKeepFailedTaskFiles();
            Path tmpDir     = new Path(reduceId.ToString());
            RawComparator <K> comparator = (RawComparator <K>)job.GetOutputKeyComparator();
            // segments required to vacate memory
            IList <Merger.Segment <K, V> > memDiskSegments = new AList <Merger.Segment <K, V> >();
            long inMemToDiskBytes   = 0;
            bool mergePhaseFinished = false;

            if (inMemoryMapOutputs.Count > 0)
            {
                TaskID mapId = inMemoryMapOutputs[0].GetMapId().GetTaskID();
                inMemToDiskBytes = CreateInMemorySegments(inMemoryMapOutputs, memDiskSegments, maxInMemReduce
                                                          );
                int numMemDiskSegments = memDiskSegments.Count;
                if (numMemDiskSegments > 0 && ioSortFactor > onDiskMapOutputs.Count)
                {
                    // If we reach here, it implies that we have less than io.sort.factor
                    // disk segments and this will be incremented by 1 (result of the
                    // memory segments merge). Since this total would still be
                    // <= io.sort.factor, we will not do any more intermediate merges,
                    // the merge of all these disk segments would be directly fed to the
                    // reduce method
                    mergePhaseFinished = true;
                    // must spill to disk, but can't retain in-mem for intermediate merge
                    Path outputPath = mapOutputFile.GetInputFileForWrite(mapId, inMemToDiskBytes).Suffix
                                          (Org.Apache.Hadoop.Mapred.Task.MergedOutputPrefix);
                    RawKeyValueIterator rIter = Merger.Merge(job, fs, keyClass, valueClass, memDiskSegments
                                                             , numMemDiskSegments, tmpDir, comparator, reporter, spilledRecordsCounter, null,
                                                             mergePhase);
                    FSDataOutputStream  @out   = CryptoUtils.WrapIfNecessary(job, fs.Create(outputPath));
                    IFile.Writer <K, V> writer = new IFile.Writer <K, V>(job, @out, keyClass, valueClass
                                                                         , codec, null, true);
                    try
                    {
                        Merger.WriteFile(rIter, writer, reporter, job);
                        writer.Close();
                        // add to list of final disk outputs.
                        onDiskMapOutputs.AddItem(new MergeManagerImpl.CompressAwarePath(outputPath, writer
                                                                                        .GetRawLength(), writer.GetCompressedLength()));
                        writer = null;
                    }
                    catch (IOException e)
                    {
                        if (null != outputPath)
                        {
                            try
                            {
                                fs.Delete(outputPath, true);
                            }
                            catch (IOException)
                            {
                                // NOTHING
                            }
                        }
                        throw;
                    }
                    finally
                    {
                        if (null != writer)
                        {
                            writer.Close();
                        }
                    }
                    Log.Info("Merged " + numMemDiskSegments + " segments, " + inMemToDiskBytes + " bytes to disk to satisfy "
                             + "reduce memory limit");
                    inMemToDiskBytes = 0;
                    memDiskSegments.Clear();
                }
                else
                {
                    if (inMemToDiskBytes != 0)
                    {
                        Log.Info("Keeping " + numMemDiskSegments + " segments, " + inMemToDiskBytes + " bytes in memory for "
                                 + "intermediate, on-disk merge");
                    }
                }
            }
            // segments on disk
            IList <Merger.Segment <K, V> > diskSegments = new AList <Merger.Segment <K, V> >();
            long onDiskBytes = inMemToDiskBytes;
            long rawBytes    = inMemToDiskBytes;

            MergeManagerImpl.CompressAwarePath[] onDisk = Sharpen.Collections.ToArray(onDiskMapOutputs
                                                                                      , new MergeManagerImpl.CompressAwarePath[onDiskMapOutputs.Count]);
            foreach (MergeManagerImpl.CompressAwarePath file in onDisk)
            {
                long fileLength = fs.GetFileStatus(file).GetLen();
                onDiskBytes += fileLength;
                rawBytes    += (file.GetRawDataLength() > 0) ? file.GetRawDataLength() : fileLength;
                Log.Debug("Disk file: " + file + " Length is " + fileLength);
                diskSegments.AddItem(new Merger.Segment <K, V>(job, fs, file, codec, keepInputs, (
                                                                   file.ToString().EndsWith(Org.Apache.Hadoop.Mapred.Task.MergedOutputPrefix) ? null
                                         : mergedMapOutputsCounter), file.GetRawDataLength()));
            }
            Log.Info("Merging " + onDisk.Length + " files, " + onDiskBytes + " bytes from disk"
                     );
            diskSegments.Sort(new _IComparer_786());
            // build final list of segments from merged backed by disk + in-mem
            IList <Merger.Segment <K, V> > finalSegments = new AList <Merger.Segment <K, V> >();
            long inMemBytes = CreateInMemorySegments(inMemoryMapOutputs, finalSegments, 0);

            Log.Info("Merging " + finalSegments.Count + " segments, " + inMemBytes + " bytes from memory into reduce"
                     );
            if (0 != onDiskBytes)
            {
                int numInMemSegments = memDiskSegments.Count;
                diskSegments.AddRange(0, memDiskSegments);
                memDiskSegments.Clear();
                // Pass mergePhase only if there is a going to be intermediate
                // merges. See comment where mergePhaseFinished is being set
                Progress            thisPhase = (mergePhaseFinished) ? null : mergePhase;
                RawKeyValueIterator diskMerge = Merger.Merge(job, fs, keyClass, valueClass, codec
                                                             , diskSegments, ioSortFactor, numInMemSegments, tmpDir, comparator, reporter, false
                                                             , spilledRecordsCounter, null, thisPhase);
                diskSegments.Clear();
                if (0 == finalSegments.Count)
                {
                    return(diskMerge);
                }
                finalSegments.AddItem(new Merger.Segment <K, V>(new MergeManagerImpl.RawKVIteratorReader
                                                                    (this, diskMerge, onDiskBytes), true, rawBytes));
            }
            return(Merger.Merge(job, fs, keyClass, valueClass, finalSegments, finalSegments.Count
                                , tmpDir, comparator, reporter, spilledRecordsCounter, null, null));
        }
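
The decision near the top of finalMerge, whether to pre-merge the in-memory segments to disk, comes down to one comparison: spill only if the existing on-disk outputs are still below io.sort.factor, so that the extra merged file keeps the final merge to a single pass. A sketch of that check with illustrative parameter names (not the MergeManagerImpl API):

static bool ShouldSpillMemSegmentsToDisk(int numMemDiskSegments, int numOnDiskOutputs, int ioSortFactor)
{
    // There must be something to spill, and adding one merged on-disk file must
    // still leave the total number of final-merge inputs within io.sort.factor.
    return numMemDiskSegments > 0 && ioSortFactor > numOnDiskOutputs;
}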
Example #39
0
            /// <exception cref="System.IO.IOException"/>
            internal virtual void GenCode(string destDir, AList <string> options)
            {
                string   pkg     = this.module;
                string   pkgpath = pkg.ReplaceAll("\\.", "/");
                FilePath pkgdir  = new FilePath(destDir, pkgpath);
                FilePath jfile   = new FilePath(pkgdir, this.name + ".java");

                if (!pkgdir.Exists())
                {
                    // create the pkg directory
                    bool ret = pkgdir.Mkdirs();
                    if (!ret)
                    {
                        throw new IOException("Cannnot create directory: " + pkgpath);
                    }
                }
                else
                {
                    if (!pkgdir.IsDirectory())
                    {
                        // not a directory
                        throw new IOException(pkgpath + " is not a directory.");
                    }
                }
                CodeBuffer cb = new CodeBuffer();

                cb.Append("// File generated by hadoop record compiler. Do not edit.\n");
                cb.Append("package " + this.module + ";\n\n");
                cb.Append("public class " + this.name + " extends org.apache.hadoop.record.Record {\n"
                          );
                // type information declarations
                cb.Append("private static final " + "org.apache.hadoop.record.meta.RecordTypeInfo "
                          + Consts.RtiVar + ";\n");
                cb.Append("private static " + "org.apache.hadoop.record.meta.RecordTypeInfo " + Consts
                          .RtiFilter + ";\n");
                cb.Append("private static int[] " + Consts.RtiFilterFields + ";\n");
                // static init for type information
                cb.Append("static {\n");
                cb.Append(Consts.RtiVar + " = " + "new org.apache.hadoop.record.meta.RecordTypeInfo(\""
                          + this.name + "\");\n");
                for (IEnumerator <JField <JType.JavaType> > i = this.fields.GetEnumerator(); i.HasNext
                         ();)
                {
                    JField <JType.JavaType> jf = i.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenStaticTypeInfo(cb, name);
                }
                cb.Append("}\n\n");
                // field definitions
                for (IEnumerator <JField <JType.JavaType> > i_1 = this.fields.GetEnumerator(); i_1.HasNext
                         ();)
                {
                    JField <JType.JavaType> jf = i_1.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenDecl(cb, name);
                }
                // default constructor
                cb.Append("public " + this.name + "() { }\n");
                // constructor
                cb.Append("public " + this.name + "(\n");
                int fIdx = 0;

                for (IEnumerator <JField <JType.JavaType> > i_2 = this.fields.GetEnumerator(); i_2.HasNext
                         (); fIdx++)
                {
                    JField <JType.JavaType> jf = i_2.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenConstructorParam(cb, name);
                    cb.Append((!i_2.HasNext()) ? string.Empty : ",\n");
                }
                cb.Append(") {\n");
                fIdx = 0;
                for (IEnumerator <JField <JType.JavaType> > i_3 = this.fields.GetEnumerator(); i_3.HasNext
                         (); fIdx++)
                {
                    JField <JType.JavaType> jf = i_3.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenConstructorSet(cb, name);
                }
                cb.Append("}\n");
                // getter/setter for type info
                cb.Append("public static org.apache.hadoop.record.meta.RecordTypeInfo" + " getTypeInfo() {\n"
                          );
                cb.Append("return " + Consts.RtiVar + ";\n");
                cb.Append("}\n");
                cb.Append("public static void setTypeFilter(" + "org.apache.hadoop.record.meta.RecordTypeInfo rti) {\n"
                          );
                cb.Append("if (null == rti) return;\n");
                cb.Append(Consts.RtiFilter + " = rti;\n");
                cb.Append(Consts.RtiFilterFields + " = null;\n");
                // set RTIFilter for nested structs.
                // To prevent setting up the type filter for the same struct more than once,
                // we use a hash map to keep track of what we've set.
                IDictionary <string, int> nestedStructMap = new Dictionary <string, int>();

                foreach (JField <JType.JavaType> jf_1 in this.fields)
                {
                    JType.JavaType type = jf_1.GetType();
                    type.GenSetRTIFilter(cb, nestedStructMap);
                }
                cb.Append("}\n");
                // setupRtiFields()
                this.GenSetupRtiFields(cb);
                // getters/setters for member variables
                for (IEnumerator <JField <JType.JavaType> > i_4 = this.fields.GetEnumerator(); i_4.HasNext
                         ();)
                {
                    JField <JType.JavaType> jf = i_4.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenGetSet(cb, name);
                }
                // serialize()
                cb.Append("public void serialize(" + "final org.apache.hadoop.record.RecordOutput "
                          + Consts.RecordOutput + ", final String " + Consts.Tag + ")\n" + "throws java.io.IOException {\n"
                          );
                cb.Append(Consts.RecordOutput + ".startRecord(this," + Consts.Tag + ");\n");
                for (IEnumerator <JField <JType.JavaType> > i_5 = this.fields.GetEnumerator(); i_5.HasNext
                         ();)
                {
                    JField <JType.JavaType> jf = i_5.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenWriteMethod(cb, name, name);
                }
                cb.Append(Consts.RecordOutput + ".endRecord(this," + Consts.Tag + ");\n");
                cb.Append("}\n");
                // deserializeWithoutFilter()
                cb.Append("private void deserializeWithoutFilter(" + "final org.apache.hadoop.record.RecordInput "
                          + Consts.RecordInput + ", final String " + Consts.Tag + ")\n" + "throws java.io.IOException {\n"
                          );
                cb.Append(Consts.RecordInput + ".startRecord(" + Consts.Tag + ");\n");
                for (IEnumerator <JField <JType.JavaType> > i_6 = this.fields.GetEnumerator(); i_6.HasNext
                         ();)
                {
                    JField <JType.JavaType> jf = i_6.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenReadMethod(cb, name, name, false);
                }
                cb.Append(Consts.RecordInput + ".endRecord(" + Consts.Tag + ");\n");
                cb.Append("}\n");
                // deserialize()
                cb.Append("public void deserialize(final " + "org.apache.hadoop.record.RecordInput "
                          + Consts.RecordInput + ", final String " + Consts.Tag + ")\n" + "throws java.io.IOException {\n"
                          );
                cb.Append("if (null == " + Consts.RtiFilter + ") {\n");
                cb.Append("deserializeWithoutFilter(" + Consts.RecordInput + ", " + Consts.Tag +
                          ");\n");
                cb.Append("return;\n");
                cb.Append("}\n");
                cb.Append("// if we're here, we need to read based on version info\n");
                cb.Append(Consts.RecordInput + ".startRecord(" + Consts.Tag + ");\n");
                cb.Append("setupRtiFields();\n");
                cb.Append("for (int " + Consts.RioPrefix + "i=0; " + Consts.RioPrefix + "i<" + Consts
                          .RtiFilter + ".getFieldTypeInfos().size(); " + Consts.RioPrefix + "i++) {\n");
                int ct = 0;

                for (IEnumerator <JField <JType.JavaType> > i_7 = this.fields.GetEnumerator(); i_7.HasNext
                         ();)
                {
                    JField <JType.JavaType> jf = i_7.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    ct++;
                    if (1 != ct)
                    {
                        cb.Append("else ");
                    }
                    cb.Append("if (" + ct + " == " + Consts.RtiFilterFields + "[" + Consts.RioPrefix
                              + "i]) {\n");
                    type.GenReadMethod(cb, name, name, false);
                    cb.Append("}\n");
                }
                if (0 != ct)
                {
                    cb.Append("else {\n");
                    cb.Append("java.util.ArrayList<" + "org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = "
                              + "(java.util.ArrayList<" + "org.apache.hadoop.record.meta.FieldTypeInfo>)" + "("
                              + Consts.RtiFilter + ".getFieldTypeInfos());\n");
                    cb.Append("org.apache.hadoop.record.meta.Utils.skip(" + Consts.RecordInput + ", "
                              + "typeInfos.get(" + Consts.RioPrefix + "i).getFieldID(), typeInfos.get(" + Consts
                              .RioPrefix + "i).getTypeID());\n");
                    cb.Append("}\n");
                }
                cb.Append("}\n");
                cb.Append(Consts.RecordInput + ".endRecord(" + Consts.Tag + ");\n");
                cb.Append("}\n");
                // compareTo()
                cb.Append("public int compareTo (final Object " + Consts.RioPrefix + "peer_) throws ClassCastException {\n"
                          );
                cb.Append("if (!(" + Consts.RioPrefix + "peer_ instanceof " + this.name + ")) {\n"
                          );
                cb.Append("throw new ClassCastException(\"Comparing different types of records.\");\n"
                          );
                cb.Append("}\n");
                cb.Append(this.name + " " + Consts.RioPrefix + "peer = (" + this.name + ") " + Consts
                          .RioPrefix + "peer_;\n");
                cb.Append("int " + Consts.RioPrefix + "ret = 0;\n");
                for (IEnumerator <JField <JType.JavaType> > i_8 = this.fields.GetEnumerator(); i_8.HasNext
                         ();)
                {
                    JField <JType.JavaType> jf = i_8.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenCompareTo(cb, name, Consts.RioPrefix + "peer." + name);
                    cb.Append("if (" + Consts.RioPrefix + "ret != 0) return " + Consts.RioPrefix + "ret;\n"
                              );
                }
                cb.Append("return " + Consts.RioPrefix + "ret;\n");
                cb.Append("}\n");
                // equals()
                cb.Append("public boolean equals(final Object " + Consts.RioPrefix + "peer_) {\n"
                          );
                cb.Append("if (!(" + Consts.RioPrefix + "peer_ instanceof " + this.name + ")) {\n"
                          );
                cb.Append("return false;\n");
                cb.Append("}\n");
                cb.Append("if (" + Consts.RioPrefix + "peer_ == this) {\n");
                cb.Append("return true;\n");
                cb.Append("}\n");
                cb.Append(this.name + " " + Consts.RioPrefix + "peer = (" + this.name + ") " + Consts
                          .RioPrefix + "peer_;\n");
                cb.Append("boolean " + Consts.RioPrefix + "ret = false;\n");
                for (IEnumerator <JField <JType.JavaType> > i_9 = this.fields.GetEnumerator(); i_9.HasNext
                         ();)
                {
                    JField <JType.JavaType> jf = i_9.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenEquals(cb, name, Consts.RioPrefix + "peer." + name);
                    cb.Append("if (!" + Consts.RioPrefix + "ret) return " + Consts.RioPrefix + "ret;\n"
                              );
                }
                cb.Append("return " + Consts.RioPrefix + "ret;\n");
                cb.Append("}\n");
                // clone()
                cb.Append("public Object clone() throws CloneNotSupportedException {\n");
                cb.Append(this.name + " " + Consts.RioPrefix + "other = new " + this.name + "();\n"
                          );
                for (IEnumerator <JField <JType.JavaType> > i_10 = this.fields.GetEnumerator(); i_10
                     .HasNext();)
                {
                    JField <JType.JavaType> jf = i_10.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenClone(cb, name);
                }
                cb.Append("return " + Consts.RioPrefix + "other;\n");
                cb.Append("}\n");
                cb.Append("public int hashCode() {\n");
                cb.Append("int " + Consts.RioPrefix + "result = 17;\n");
                cb.Append("int " + Consts.RioPrefix + "ret;\n");
                for (IEnumerator <JField <JType.JavaType> > i_11 = this.fields.GetEnumerator(); i_11
                     .HasNext();)
                {
                    JField <JType.JavaType> jf = i_11.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenHashCode(cb, name);
                    cb.Append(Consts.RioPrefix + "result = 37*" + Consts.RioPrefix + "result + " + Consts
                              .RioPrefix + "ret;\n");
                }
                cb.Append("return " + Consts.RioPrefix + "result;\n");
                cb.Append("}\n");
                cb.Append("public static String signature() {\n");
                cb.Append("return \"" + this._enclosing.GetSignature() + "\";\n");
                cb.Append("}\n");
                cb.Append("public static class Comparator extends" + " org.apache.hadoop.record.RecordComparator {\n"
                          );
                cb.Append("public Comparator() {\n");
                cb.Append("super(" + this.name + ".class);\n");
                cb.Append("}\n");
                cb.Append("static public int slurpRaw(byte[] b, int s, int l) {\n");
                cb.Append("try {\n");
                cb.Append("int os = s;\n");
                for (IEnumerator <JField <JType.JavaType> > i_12 = this.fields.GetEnumerator(); i_12
                     .HasNext();)
                {
                    JField <JType.JavaType> jf = i_12.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenSlurpBytes(cb, "b", "s", "l");
                }
                cb.Append("return (os - s);\n");
                cb.Append("} catch(java.io.IOException e) {\n");
                cb.Append("throw new RuntimeException(e);\n");
                cb.Append("}\n");
                cb.Append("}\n");
                cb.Append("static public int compareRaw(byte[] b1, int s1, int l1,\n");
                cb.Append("                             byte[] b2, int s2, int l2) {\n");
                cb.Append("try {\n");
                cb.Append("int os1 = s1;\n");
                for (IEnumerator <JField <JType.JavaType> > i_13 = this.fields.GetEnumerator(); i_13
                     .HasNext();)
                {
                    JField <JType.JavaType> jf = i_13.Next();
                    string         name        = jf.GetName();
                    JType.JavaType type        = jf.GetType();
                    type.GenCompareBytes(cb);
                }
                cb.Append("return (os1 - s1);\n");
                cb.Append("} catch(java.io.IOException e) {\n");
                cb.Append("throw new RuntimeException(e);\n");
                cb.Append("}\n");
                cb.Append("}\n");
                cb.Append("public int compare(byte[] b1, int s1, int l1,\n");
                cb.Append("                   byte[] b2, int s2, int l2) {\n");
                cb.Append("int ret = compareRaw(b1,s1,l1,b2,s2,l2);\n");
                cb.Append("return (ret == -1)? -1 : ((ret==0)? 1 : 0);");
                cb.Append("}\n");
                cb.Append("}\n\n");
                cb.Append("static {\n");
                cb.Append("org.apache.hadoop.record.RecordComparator.define(" + this.name + ".class, new Comparator());\n"
                          );
                cb.Append("}\n");
                cb.Append("}\n");
                FileWriter jj = new FileWriter(jfile);

                try
                {
                    jj.Write(cb.ToString());
                }
                finally
                {
                    jj.Close();
                }
            }
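
The hashCode() emitted above combines per-field hashes with the usual 17/37 scheme. Written as ordinary C# rather than generated Java source, the accumulation is simply:

static int CombineHashes(params int[] fieldHashes)
{
    int result = 17;               // seed used by the generated code
    foreach (int h in fieldHashes)
    {
        result = 37 * result + h;  // same step the generator emits per field
    }
    return result;
}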
Example #40
0
            /// <exception cref="System.IO.IOException"/>
            public override void Merge(IList <InMemoryMapOutput <K, V> > inputs)
            {
                if (inputs == null || inputs.Count == 0)
                {
                    return;
                }
                //name this output file same as the name of the first file that is
                //there in the current list of inmem files (this is guaranteed to
                //be absent on the disk currently. So we don't overwrite a prev.
                //created spill). Also we need to create the output file now since
                //it is not guaranteed that this file will be present after merge
                //is called (we delete empty files as soon as we see them
                //in the merge method)
                //figure out the mapId
                TaskAttemptID mapId     = inputs[0].GetMapId();
                TaskID        mapTaskId = mapId.GetTaskID();
                IList <Merger.Segment <K, V> > inMemorySegments = new AList <Merger.Segment <K, V> >();
                long mergeOutputSize = this._enclosing.CreateInMemorySegments(inputs, inMemorySegments
                                                                              , 0);
                int  noInMemorySegments = inMemorySegments.Count;
                Path outputPath         = this._enclosing.mapOutputFile.GetInputFileForWrite(mapTaskId, mergeOutputSize
                                                                                             ).Suffix(Org.Apache.Hadoop.Mapred.Task.MergedOutputPrefix);
                FSDataOutputStream @out = CryptoUtils.WrapIfNecessary(this._enclosing.jobConf, this
                                                                      ._enclosing.rfs.Create(outputPath));

                IFile.Writer <K, V> writer = new IFile.Writer <K, V>(this._enclosing.jobConf, @out,
                                                                     (Type)this._enclosing.jobConf.GetMapOutputKeyClass(), (Type)this._enclosing.jobConf
                                                                     .GetMapOutputValueClass(), this._enclosing.codec, null, true);
                RawKeyValueIterator rIter = null;

                MergeManagerImpl.CompressAwarePath compressAwarePath;
                try
                {
                    MergeManagerImpl.Log.Info("Initiating in-memory merge with " + noInMemorySegments
                                              + " segments...");
                    rIter = Merger.Merge(this._enclosing.jobConf, this._enclosing.rfs, (Type)this._enclosing
                                         .jobConf.GetMapOutputKeyClass(), (Type)this._enclosing.jobConf.GetMapOutputValueClass
                                             (), inMemorySegments, inMemorySegments.Count, new Path(this._enclosing.reduceId.
                                                                                                    ToString()), (RawComparator <K>) this._enclosing.jobConf.GetOutputKeyComparator(),
                                         this._enclosing.reporter, this._enclosing.spilledRecordsCounter, null, null);
                    if (null == this._enclosing.combinerClass)
                    {
                        Merger.WriteFile(rIter, writer, this._enclosing.reporter, this._enclosing.jobConf
                                         );
                    }
                    else
                    {
                        this._enclosing.combineCollector.SetWriter(writer);
                        this._enclosing.CombineAndSpill(rIter, this._enclosing.reduceCombineInputCounter);
                    }
                    writer.Close();
                    compressAwarePath = new MergeManagerImpl.CompressAwarePath(outputPath, writer.GetRawLength
                                                                                   (), writer.GetCompressedLength());
                    MergeManagerImpl.Log.Info(this._enclosing.reduceId + " Merge of the " + noInMemorySegments
                                              + " files in-memory complete." + " Local file is " + outputPath + " of size " +
                                              this._enclosing.localFS.GetFileStatus(outputPath).GetLen());
                }
                catch (IOException e)
                {
                    //make sure that we delete the ondisk file that we created
                    //earlier when we invoked cloneFileAttributes
                    this._enclosing.localFS.Delete(outputPath, true);
                    throw;
                }
                // Note the output of the merge
                this._enclosing.CloseOnDiskFile(compressAwarePath);
            }
Example #41
0
        public virtual void TestAuxEventDispatch()
        {
            Configuration conf = new Configuration();

            conf.SetStrings(YarnConfiguration.NmAuxServices, new string[] { "Asrv", "Bsrv" });
            conf.SetClass(string.Format(YarnConfiguration.NmAuxServiceFmt, "Asrv"), typeof(TestAuxServices.ServiceA
                                                                                           ), typeof(Org.Apache.Hadoop.Service.Service));
            conf.SetClass(string.Format(YarnConfiguration.NmAuxServiceFmt, "Bsrv"), typeof(TestAuxServices.ServiceB
                                                                                           ), typeof(Org.Apache.Hadoop.Service.Service));
            conf.SetInt("A.expected.init", 1);
            conf.SetInt("B.expected.stop", 1);
            AuxServices aux = new AuxServices();

            aux.Init(conf);
            aux.Start();
            ApplicationId appId1 = ApplicationId.NewInstance(0, 65);
            ByteBuffer    buf    = ByteBuffer.Allocate(6);

            buf.PutChar('A');
            buf.PutInt(65);
            buf.Flip();
            AuxServicesEvent @event = new AuxServicesEvent(AuxServicesEventType.ApplicationInit
                                                           , "user0", appId1, "Asrv", buf);

            aux.Handle(@event);
            ApplicationId appId2 = ApplicationId.NewInstance(0, 66);

            @event = new AuxServicesEvent(AuxServicesEventType.ApplicationStop, "user0", appId2
                                          , "Bsrv", null);
            // verify all services got the stop event
            aux.Handle(@event);
            ICollection <AuxiliaryService> servs = aux.GetServices();

            foreach (AuxiliaryService serv in servs)
            {
                AList <int> appIds = ((TestAuxServices.LightService)serv).GetAppIdsStopped();
                NUnit.Framework.Assert.AreEqual("app not properly stopped", 1, appIds.Count);
                NUnit.Framework.Assert.IsTrue("wrong app stopped", appIds.Contains((int)66));
            }
            foreach (AuxiliaryService serv_1 in servs)
            {
                NUnit.Framework.Assert.IsNull(((TestAuxServices.LightService)serv_1).containerId);
                NUnit.Framework.Assert.IsNull(((TestAuxServices.LightService)serv_1).resource);
            }
            ApplicationAttemptId     attemptId = ApplicationAttemptId.NewInstance(appId1, 1);
            ContainerTokenIdentifier cti       = new ContainerTokenIdentifier(ContainerId.NewContainerId
                                                                                  (attemptId, 1), string.Empty, string.Empty, Resource.NewInstance(1, 1), 0, 0, 0,
                                                                              Priority.NewInstance(0), 0);

            Org.Apache.Hadoop.Yarn.Server.Nodemanager.Containermanager.Container.Container container
                = new ContainerImpl(null, null, null, null, null, null, cti);
            ContainerId containerId = container.GetContainerId();
            Resource    resource    = container.GetResource();

            @event = new AuxServicesEvent(AuxServicesEventType.ContainerInit, container);
            aux.Handle(@event);
            foreach (AuxiliaryService serv_2 in servs)
            {
                NUnit.Framework.Assert.AreEqual(containerId, ((TestAuxServices.LightService)serv_2
                                                              ).containerId);
                NUnit.Framework.Assert.AreEqual(resource, ((TestAuxServices.LightService)serv_2).
                                                resource);
                ((TestAuxServices.LightService)serv_2).containerId = null;
                ((TestAuxServices.LightService)serv_2).resource    = null;
            }
            @event = new AuxServicesEvent(AuxServicesEventType.ContainerStop, container);
            aux.Handle(@event);
            foreach (AuxiliaryService serv_3 in servs)
            {
                NUnit.Framework.Assert.AreEqual(containerId, ((TestAuxServices.LightService)serv_3
                                                              ).containerId);
                NUnit.Framework.Assert.AreEqual(resource, ((TestAuxServices.LightService)serv_3).
                                                resource);
            }
        }
Example #42
0
        /// <summary>
        /// Attempts to write the report to the given output using the specified
        /// config.
        /// </summary>
        /// <remarks>
        /// Attempts to write the report to the given output using the specified
        /// config. It will open up the expected reducer output file and read in its
        /// contents and then split up by operation output and sort by operation type
        /// and then for each operation type it will generate a report to the specified
        /// result file and the console.
        /// </remarks>
        /// <param name="cfg">the config specifying the files and output</param>
        /// <exception cref="System.Exception">if files can not be opened/closed/read or invalid format
        ///     </exception>
        private void WriteReport(ConfigExtractor cfg)
        {
            Path dn = cfg.GetOutputPath();

            Log.Info("Writing report using contents of " + dn);
            FileSystem fs = dn.GetFileSystem(cfg.GetConfig());

            FileStatus[]   reduceFiles  = fs.ListStatus(dn);
            BufferedReader fileReader   = null;
            PrintWriter    reportWriter = null;

            try
            {
                IList <OperationOutput> noOperations = new AList <OperationOutput>();
                IDictionary <string, IList <OperationOutput> > splitTypes = new SortedDictionary <string
                                                                                                  , IList <OperationOutput> >();
                foreach (FileStatus fn in reduceFiles)
                {
                    if (!fn.GetPath().GetName().StartsWith("part"))
                    {
                        continue;
                    }
                    fileReader = new BufferedReader(new InputStreamReader(new DataInputStream(fs.Open
                                                                                                  (fn.GetPath()))));
                    string line;
                    while ((line = fileReader.ReadLine()) != null)
                    {
                        string[] pieces = line.Split("\t", 2);
                        if (pieces.Length == 2)
                        {
                            OperationOutput data = new OperationOutput(pieces[0], pieces[1]);
                            string          op   = (data.GetOperationType());
                            if (op != null)
                            {
                                IList <OperationOutput> opList = splitTypes[op];
                                if (opList == null)
                                {
                                    opList = new AList <OperationOutput>();
                                }
                                opList.AddItem(data);
                                splitTypes[op] = opList;
                            }
                            else
                            {
                                noOperations.AddItem(data);
                            }
                        }
                        else
                        {
                            throw new IOException("Unparseable line " + line);
                        }
                    }
                    fileReader.Close();
                    fileReader = null;
                }
                FilePath resFile = null;
                if (cfg.GetResultFile() != null)
                {
                    resFile = new FilePath(cfg.GetResultFile());
                }
                if (resFile != null)
                {
                    Log.Info("Report results being placed to logging output and to file " + resFile.GetCanonicalPath
                                 ());
                    reportWriter = new PrintWriter(new FileOutputStream(resFile));
                }
                else
                {
                    Log.Info("Report results being placed to logging output");
                }
                ReportWriter reporter = new ReportWriter();
                if (!noOperations.IsEmpty())
                {
                    reporter.BasicReport(noOperations, reportWriter);
                }
                foreach (string opType in splitTypes.Keys)
                {
                    reporter.OpReport(opType, splitTypes[opType], reportWriter);
                }
            }
            finally
            {
                if (fileReader != null)
                {
                    fileReader.Close();
                }
                if (reportWriter != null)
                {
                    reportWriter.Close();
                }
            }
        }
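
The grouping loop in WriteReport is the usual build-a-list-per-key pattern over a sorted map. A stand-alone C# sketch of just that step (GroupByKey is a hypothetical helper and assumes System.Collections.Generic is imported):

static IDictionary<string, IList<string>> GroupByKey(IEnumerable<string> lines)
{
    var groups = new SortedDictionary<string, IList<string>>();
    foreach (string line in lines)
    {
        string[] pieces = line.Split(new[] { '\t' }, 2);  // key<TAB>value
        if (pieces.Length != 2)
        {
            continue;                                     // the method above throws on bad lines instead
        }
        IList<string> bucket;
        if (!groups.TryGetValue(pieces[0], out bucket))
        {
            bucket = new List<string>();
            groups[pieces[0]] = bucket;
        }
        bucket.Add(pieces[1]);
    }
    return groups;
}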
Example #43
0
File: Program.cs  Project: bel-uwa/Loyc
        public static void Main(string[] args)
        {
            // Workaround for MS bug: Assert(false) will not fire in debugger
            Debug.Listeners.Clear();
            Debug.Listeners.Add(new DefaultTraceListener());

            Console.WriteLine("Running tests on stable code...");
            // Omitted: unit tests that throw & catch exceptions (those are below)

            // Loyc.Essentials
            RunTests.Run(new ListExtTests());
            RunTests.Run(new MathExTests());
            RunTests.Run(new UStringTests());
            RunTests.Run(new StringExtTests());
            RunTests.Run(new HashTagsTests());
            RunTests.Run(new LocalizeTests());
            RunTests.Run(new SymbolTests());
            RunTests.Run(new ThreadExTests());
            RunTests.Run(new ListTests <InternalList <int> >(false, delegate(int n) { var l = InternalList <int> .Empty; l.Resize(n); return(l); }));
            RunTests.Run(new ListRangeTests <InternalList <int> >(false, delegate() { return(InternalList <int> .Empty); }));
            RunTests.Run(new ListTests <DList <int> >(false, delegate(int n) { var l = new DList <int>(); l.Resize(n); return(l); }));
            RunTests.Run(new DequeTests <DList <int> >(delegate() { return(new DList <int>()); }));
            RunTests.Run(new ListRangeTests <DList <int> >(false, delegate() { return(new DList <int>()); }));

            // Loyc.Collections
            RunTests.Run(new CPTrieTests());
            RunTests.Run(new SimpleCacheTests());
            RunTests.Run(new InvertibleSetTests());
            // Test with small node sizes as well as the standard node size,
            // including the minimum size of 3 (the most problematic size).
            int seed = Environment.TickCount;

            RunTests.Run(new AListTests(false, seed, 8, 8));
            RunTests.Run(new BListTests(false, seed, 3, 3));
            RunTests.Run(new BDictionaryTests(false, seed, 6, 6));
            RunTests.Run(new SparseAListTests(false, seed, 10, 10));
            RunTests.Run(new DequeTests <AList <int> >(delegate() { return(new AList <int>()); }));
            RunTests.Run(new DequeTests <SparseAList <int> >(delegate() { return(new SparseAList <int>()); }));
            RunTests.Run(new DictionaryTests <BDictionary <object, object> >(true, true));
            RunTests.Run(new ListTests <AList <int> >(false, delegate(int n) { var l = new AList <int>(); l.Resize(n); return(l); }));
            RunTests.Run(new ListRangeTests <AList <int> >(false, delegate() { return(new AList <int>()); }, 12345));
            RunTests.Run(new ListTests <SparseAList <int> >(false, delegate(int n) { var l = new SparseAList <int>(); l.Resize(n); return(l); }, 12345));
            RunTests.Run(new ListRangeTests <SparseAList <int> >(false, delegate() { return(new SparseAList <int>()); }, 12345));

            // Loyc.Utilities
            RunTests.Run(new LineMathTests());
            RunTests.Run(new PointMathTests());
            RunTests.Run(new IntSetTests());
            RunTests.Run(new ExtraTagsInWListTests());
            RunTests.Run(new UGTests());

            // Loyc.Syntax
            RunTests.Run(new LesLexerTests());
            RunTests.Run(new LesParserTests());
            RunTests.Run(new TokensToTreeTests());
            RunTests.Run(new StreamCharSourceTests());

            for (;;)
            {
                ConsoleKeyInfo k;

                Console.WriteLine();
                Console.WriteLine("What do you want to do?");
                Console.WriteLine("1. Run unit tests that expect exceptions");
                Console.WriteLine("2. Run unit tests on unstable code");
                Console.WriteLine("9. Benchmarks");
                Console.WriteLine("Z. List encodings");
                Console.WriteLine("Press ESC or ENTER to Quit");
                Console.WriteLine((k = Console.ReadKey(true)).KeyChar);
                if (k.Key == ConsoleKey.Escape || k.Key == ConsoleKey.Enter)
                {
                    break;
                }
                else if (k.KeyChar == '1')
                {
                    // Loyc.Essentials
                    RunTests.Run(new GTests());

                    // Loyc.Utilities
                    RunTests.Run(new GoInterfaceTests());

                    // Loyc.Collections
                    RunTests.Run(new MapTests());
                    RunTests.Run(new SparseAListTests(true, seed, 8, 4));
                    RunTests.Run(new SparseAListTests());
                    RunTests.Run(new AListTests());
                    RunTests.Run(new BListTests());
                    RunTests.Run(new BDictionaryTests());
                    RunTests.Run(new MSetTests());                    // derived from MutableSetTests<MSet<STI>, STI>
                    RunTests.Run(new SymbolSetTests());               // derived from MutableSetTests<MSet<Symbol>, Symbol>
                    RunTests.Run(new ImmSetTests());                  // tests for Set<T>
                    RunTests.Run(new MapTests());                     // derived from DictionaryTests<MMap<object, object>>
                    RunTests.Run(new RWListTests());
                    RunTests.Run(new WListTests());
                    RunTests.Run(new RVListTests());
                    RunTests.Run(new VListTests());
                }
                else if (k.KeyChar == '2')
                {
                    RunTests.Run(new KeylessHashtableTests());
                }
                else if (k.KeyChar == '9')
                {
                    RunBenchmarks();
                }
                else if (k.KeyChar == 'z' || k.KeyChar == 'Z')
                {
                    foreach (EncodingInfo inf in Encoding.GetEncodings())
                    {
                        Console.WriteLine("{0} {1}: {2}", inf.CodePage, inf.Name, inf.DisplayName);
                    }
                }
            }
        }
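
The runner above hands each fixture a factory delegate that builds a fresh collection of the requested size, so one generic test class can exercise AList<int>, DList<int>, and SparseAList<int> alike. A minimal, hypothetical sketch of that pattern, using the BCL List<int> as a stand-in for the Loyc collection types:

 using System;
 using System.Collections.Generic;

 static class FactoryPatternSketch
 {
     // Each fixture receives a factory delegate that builds a fresh collection
     // of the requested size, so every test case starts from a known state.
     static void RunSizes(Func<int, List<int>> makeList)
     {
         foreach (int n in new[] { 0, 1, 10 })
         {
             Console.WriteLine("built a list with {0} element(s)", makeList(n).Count);
         }
     }

     static void Main()
     {
         RunSizes(delegate(int n) { var l = new List<int>(); for (int i = 0; i < n; i++) { l.Add(0); } return l; });
     }
 }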
示例#44
0
        public virtual void TestInMemoryAndOnDiskMerger()
        {
            JobID         jobId     = new JobID("a", 0);
            TaskAttemptID reduceId1 = new TaskAttemptID(new TaskID(jobId, TaskType.Reduce, 0)
                                                        , 0);
            TaskAttemptID                 mapId1       = new TaskAttemptID(new TaskID(jobId, TaskType.Map, 1), 0);
            TaskAttemptID                 mapId2       = new TaskAttemptID(new TaskID(jobId, TaskType.Map, 2), 0);
            LocalDirAllocator             lda          = new LocalDirAllocator(MRConfig.LocalDir);
            MergeManagerImpl <Text, Text> mergeManager = new MergeManagerImpl <Text, Text>(reduceId1
                                                                                           , jobConf, fs, lda, Reporter.Null, null, null, null, null, null, null, null, new
                                                                                           Progress(), new MROutputFiles());
            // write map outputs
            IDictionary <string, string> map1 = new SortedDictionary <string, string>();

            map1["apple"]  = "disgusting";
            map1["carrot"] = "delicious";
            IDictionary <string, string> map2 = new SortedDictionary <string, string>();

            map1["banana"] = "pretty good";
            byte[] mapOutputBytes1 = WriteMapOutput(conf, map1);
            byte[] mapOutputBytes2 = WriteMapOutput(conf, map2);
            InMemoryMapOutput <Text, Text> mapOutput1 = new InMemoryMapOutput <Text, Text>(conf
                                                                                           , mapId1, mergeManager, mapOutputBytes1.Length, null, true);
            InMemoryMapOutput <Text, Text> mapOutput2 = new InMemoryMapOutput <Text, Text>(conf
                                                                                           , mapId2, mergeManager, mapOutputBytes2.Length, null, true);

            System.Array.Copy(mapOutputBytes1, 0, mapOutput1.GetMemory(), 0, mapOutputBytes1.
                              Length);
            System.Array.Copy(mapOutputBytes2, 0, mapOutput2.GetMemory(), 0, mapOutputBytes2.
                              Length);
            // create merger and run merge
            MergeThread <InMemoryMapOutput <Text, Text>, Text, Text> inMemoryMerger = mergeManager
                                                                                      .CreateInMemoryMerger();
            IList <InMemoryMapOutput <Text, Text> > mapOutputs1 = new AList <InMemoryMapOutput <Text
                                                                                                , Text> >();

            mapOutputs1.AddItem(mapOutput1);
            mapOutputs1.AddItem(mapOutput2);
            inMemoryMerger.Merge(mapOutputs1);
            NUnit.Framework.Assert.AreEqual(1, mergeManager.onDiskMapOutputs.Count);
            TaskAttemptID reduceId2 = new TaskAttemptID(new TaskID(jobId, TaskType.Reduce, 3)
                                                        , 0);
            TaskAttemptID mapId3 = new TaskAttemptID(new TaskID(jobId, TaskType.Map, 4), 0);
            TaskAttemptID mapId4 = new TaskAttemptID(new TaskID(jobId, TaskType.Map, 5), 0);
            // write map outputs
            IDictionary <string, string> map3 = new SortedDictionary <string, string>();

            map3["apple"]  = "awesome";
            map3["carrot"] = "amazing";
            IDictionary <string, string> map4 = new SortedDictionary <string, string>();

            map4["banana"] = "bla";
            byte[] mapOutputBytes3 = WriteMapOutput(conf, map3);
            byte[] mapOutputBytes4 = WriteMapOutput(conf, map4);
            InMemoryMapOutput <Text, Text> mapOutput3 = new InMemoryMapOutput <Text, Text>(conf
                                                                                           , mapId3, mergeManager, mapOutputBytes3.Length, null, true);
            InMemoryMapOutput <Text, Text> mapOutput4 = new InMemoryMapOutput <Text, Text>(conf
                                                                                           , mapId4, mergeManager, mapOutputBytes4.Length, null, true);

            System.Array.Copy(mapOutputBytes3, 0, mapOutput3.GetMemory(), 0, mapOutputBytes3.
                              Length);
            System.Array.Copy(mapOutputBytes4, 0, mapOutput4.GetMemory(), 0, mapOutputBytes4.
                              Length);
            // create merger and run merge
            MergeThread <InMemoryMapOutput <Text, Text>, Text, Text> inMemoryMerger2 = mergeManager
                                                                                       .CreateInMemoryMerger();
            IList <InMemoryMapOutput <Text, Text> > mapOutputs2 = new AList <InMemoryMapOutput <Text
                                                                                                , Text> >();

            mapOutputs2.AddItem(mapOutput3);
            mapOutputs2.AddItem(mapOutput4);
            inMemoryMerger2.Merge(mapOutputs2);
            NUnit.Framework.Assert.AreEqual(2, mergeManager.onDiskMapOutputs.Count);
            IList <MergeManagerImpl.CompressAwarePath> paths = new AList <MergeManagerImpl.CompressAwarePath
                                                                          >();
            IEnumerator <MergeManagerImpl.CompressAwarePath> iterator = mergeManager.onDiskMapOutputs
                                                                        .GetEnumerator();
            IList <string> keys   = new AList <string>();
            IList <string> values = new AList <string>();

            while (iterator.HasNext())
            {
                MergeManagerImpl.CompressAwarePath next = iterator.Next();
                ReadOnDiskMapOutput(conf, fs, next, keys, values);
                paths.AddItem(next);
            }
            NUnit.Framework.Assert.AreEqual(keys, Arrays.AsList("apple", "banana", "carrot",
                                                                "apple", "banana", "carrot"));
            NUnit.Framework.Assert.AreEqual(values, Arrays.AsList("awesome", "bla", "amazing"
                                                                  , "disgusting", "pretty good", "delicious"));
            mergeManager.Close();
            mergeManager = new MergeManagerImpl <Text, Text>(reduceId2, jobConf, fs, lda, Reporter
                                                             .Null, null, null, null, null, null, null, null, new Progress(), new MROutputFiles
                                                                 ());
            MergeThread <MergeManagerImpl.CompressAwarePath, Text, Text> onDiskMerger = mergeManager
                                                                                        .CreateOnDiskMerger();

            onDiskMerger.Merge(paths);
            NUnit.Framework.Assert.AreEqual(1, mergeManager.onDiskMapOutputs.Count);
            keys   = new AList <string>();
            values = new AList <string>();
            ReadOnDiskMapOutput(conf, fs, mergeManager.onDiskMapOutputs.GetEnumerator().Next(
                                    ), keys, values);
            NUnit.Framework.Assert.AreEqual(keys, Arrays.AsList("apple", "apple", "banana", "banana"
                                                                , "carrot", "carrot"));
            NUnit.Framework.Assert.AreEqual(values, Arrays.AsList("awesome", "disgusting", "pretty good"
                                                                  , "bla", "amazing", "delicious"));
            mergeManager.Close();
            NUnit.Framework.Assert.AreEqual(0, mergeManager.inMemoryMapOutputs.Count);
            NUnit.Framework.Assert.AreEqual(0, mergeManager.inMemoryMergedMapOutputs.Count);
            NUnit.Framework.Assert.AreEqual(0, mergeManager.onDiskMapOutputs.Count);
        }
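
The assertions above depend on the merger emitting records in sorted key order, so the final on-disk merge interleaves the two segments into apple, apple, banana, banana, carrot, carrot. A self-contained sketch of that expectation using plain BCL collections rather than the Hadoop merge classes (how ties between equal keys are ordered is left to the real merger):

 using System;
 using System.Collections.Generic;
 using System.Linq;

 static class SortedMergeSketch
 {
     static void Main()
     {
         // Two already-sorted segments, standing in for the two on-disk map outputs.
         var segment1 = new SortedDictionary<string, string>();
         segment1["apple"] = "awesome"; segment1["banana"] = "bla"; segment1["carrot"] = "amazing";
         var segment2 = new SortedDictionary<string, string>();
         segment2["apple"] = "disgusting"; segment2["banana"] = "pretty good"; segment2["carrot"] = "delicious";

         // Merging sorted segments preserves overall key order, so equal keys
         // from different segments end up adjacent.
         var mergedKeys = segment1.Concat(segment2)
                                  .Select(kv => kv.Key)
                                  .OrderBy(k => k, StringComparer.Ordinal)
                                  .ToList();
         Console.WriteLine(string.Join(", ", mergedKeys));
         // apple, apple, banana, banana, carrot, carrot
     }
 }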
示例#45
0
 /// <param name="cleanupCommands">the cleanupCommands to set</param>
 public virtual void SetCleanupCommands(AList <CLICommand> cleanupCommands)
 {
     this.cleanupCommands = cleanupCommands;
 }
示例#46
0
 public ArrayListBackedIterator(AList <X> data)
     : base(data)
 {
 }
示例#47
0
 public ArrayNI()
 {
     //super(); // the superclass constructor does nothing
     mItems = new AList <Variant>();
 }
示例#48
0
        // A special edit which acts as a sentinel value by marking the end of the
        // list of edits
        /// <summary>Does the three way merge between a common base and two sequences.</summary>
        /// <remarks>Does the three way merge between a common base and two sequences.</remarks>
        /// <typeparam name="S">type of sequence to merge</typeparam>
        /// <param name="cmp">comparison method for this execution.</param>
        /// <param name="base">the common base sequence</param>
        /// <param name="ours">the first sequence to be merged</param>
        /// <param name="theirs">the second sequence to be merged</param>
        /// <returns>the resulting content</returns>
        public MergeResult <S> Merge <S>(SequenceComparator <S> cmp, S @base, S ours, S theirs
                                         ) where S : Sequence
        {
            IList <S> sequences = new AList <S>(3);

            sequences.AddItem(@base);
            sequences.AddItem(ours);
            sequences.AddItem(theirs);
            MergeResult <S> result = new MergeResult <S>(sequences);

            if (ours.Size() == 0)
            {
                if (theirs.Size() != 0)
                {
                    EditList theirsEdits = diffAlg.Diff(cmp, @base, theirs);
                    if (!theirsEdits.IsEmpty())
                    {
                        // we deleted, they modified -> Let their complete content
                        // conflict with empty text
                        result.Add(1, 0, 0, MergeChunk.ConflictState.FIRST_CONFLICTING_RANGE);
                        result.Add(2, 0, theirs.Size(), MergeChunk.ConflictState.NEXT_CONFLICTING_RANGE);
                    }
                    else
                    {
                        // we deleted, they didn't modify -> Let our deletion win
                        result.Add(1, 0, 0, MergeChunk.ConflictState.NO_CONFLICT);
                    }
                }
                else
                {
                    // we and they deleted -> return a single chunk of nothing
                    result.Add(1, 0, 0, MergeChunk.ConflictState.NO_CONFLICT);
                }
                return(result);
            }
            else
            {
                if (theirs.Size() == 0)
                {
                    EditList oursEdits = diffAlg.Diff(cmp, @base, ours);
                    if (!oursEdits.IsEmpty())
                    {
                        // we modified, they deleted -> Let our complete content
                        // conflict with empty text
                        result.Add(1, 0, ours.Size(), MergeChunk.ConflictState.FIRST_CONFLICTING_RANGE);
                        result.Add(2, 0, 0, MergeChunk.ConflictState.NEXT_CONFLICTING_RANGE);
                    }
                    else
                    {
                        // they deleted, we didn't modify -> Let their deletion win
                        result.Add(2, 0, 0, MergeChunk.ConflictState.NO_CONFLICT);
                    }
                    return(result);
                }
            }
            EditList        oursEdits_1   = diffAlg.Diff(cmp, @base, ours);
            Iterator <Edit> baseToOurs    = oursEdits_1.Iterator();
            EditList        theirsEdits_1 = diffAlg.Diff(cmp, @base, theirs);
            Iterator <Edit> baseToTheirs  = theirsEdits_1.Iterator();
            int             current       = 0;
            // points to the next line (first line is 0) of base
            // which was not handled yet
            Edit oursEdit   = NextEdit(baseToOurs);
            Edit theirsEdit = NextEdit(baseToTheirs);

            // iterate over all edits from base to ours and from base to theirs
            // leave the loop when there are no edits more for ours or for theirs
            // (or both)
            while (theirsEdit != END_EDIT || oursEdit != END_EDIT)
            {
                if (oursEdit.GetEndA() < theirsEdit.GetBeginA())
                {
                    // something was changed in ours not overlapping with any change
                    // from theirs. First add the common part in front of the edit
                    // then the edit.
                    if (current != oursEdit.GetBeginA())
                    {
                        result.Add(0, current, oursEdit.GetBeginA(), MergeChunk.ConflictState.NO_CONFLICT
                                   );
                    }
                    result.Add(1, oursEdit.GetBeginB(), oursEdit.GetEndB(), MergeChunk.ConflictState.
                               NO_CONFLICT);
                    current  = oursEdit.GetEndA();
                    oursEdit = NextEdit(baseToOurs);
                }
                else
                {
                    if (theirsEdit.GetEndA() < oursEdit.GetBeginA())
                    {
                        // something was changed in theirs not overlapping with any
                        // from ours. First add the common part in front of the edit
                        // then the edit.
                        if (current != theirsEdit.GetBeginA())
                        {
                            result.Add(0, current, theirsEdit.GetBeginA(), MergeChunk.ConflictState.NO_CONFLICT
                                       );
                        }
                        result.Add(2, theirsEdit.GetBeginB(), theirsEdit.GetEndB(), MergeChunk.ConflictState
                                   .NO_CONFLICT);
                        current    = theirsEdit.GetEndA();
                        theirsEdit = NextEdit(baseToTheirs);
                    }
                    else
                    {
                        // here we found a real overlapping modification
                        // if there is a common part in front of the conflict add it
                        if (oursEdit.GetBeginA() != current && theirsEdit.GetBeginA() != current)
                        {
                            result.Add(0, current, Math.Min(oursEdit.GetBeginA(), theirsEdit.GetBeginA()), MergeChunk.ConflictState
                                       .NO_CONFLICT);
                        }
                        // set some initial values for the ranges in A and B which we
                        // want to handle
                        int oursBeginB   = oursEdit.GetBeginB();
                        int theirsBeginB = theirsEdit.GetBeginB();
                        // harmonize the start of the ranges in A and B
                        if (oursEdit.GetBeginA() < theirsEdit.GetBeginA())
                        {
                            theirsBeginB -= theirsEdit.GetBeginA() - oursEdit.GetBeginA();
                        }
                        else
                        {
                            oursBeginB -= oursEdit.GetBeginA() - theirsEdit.GetBeginA();
                        }
                        // combine edits:
                        // Maybe an Edit on one side corresponds to multiple Edits on
                        // the other side. Then we have to combine the Edits of the
                        // other side - so in the end we can merge together two single
                        // edits.
                        //
                        // It is important to notice that this combining will extend the
                        // ranges of our conflict always downwards (towards the end of
                        // the content). The starts of the conflicting ranges in ours
                        // and theirs are not touched here.
                        //
                        // This combining is an iterative process: after we have
                        // combined some edits we have to do the check again. The
                        // combined edits could now correspond to multiple edits on the
                        // other side.
                        //
                        // Example: when this combining algorithm works on the following
                        // edits
                        // oursEdits=((0-5,0-5),(6-8,6-8),(10-11,10-11)) and
                        // theirsEdits=((0-1,0-1),(2-3,2-3),(5-7,5-7))
                        // it will merge them into
                        // oursEdits=((0-8,0-8),(10-11,10-11)) and
                        // theirsEdits=((0-7,0-7))
                        //
                        // Since the only interesting thing to us is how in ours and
                        // theirs the end of the conflicting range is changing we let
                        // oursEdit and theirsEdit point to the last conflicting edit
                        Edit nextOursEdit   = NextEdit(baseToOurs);
                        Edit nextTheirsEdit = NextEdit(baseToTheirs);
                        for (; ;)
                        {
                            if (oursEdit.GetEndA() >= nextTheirsEdit.GetBeginA())
                            {
                                theirsEdit     = nextTheirsEdit;
                                nextTheirsEdit = NextEdit(baseToTheirs);
                            }
                            else
                            {
                                if (theirsEdit.GetEndA() >= nextOursEdit.GetBeginA())
                                {
                                    oursEdit     = nextOursEdit;
                                    nextOursEdit = NextEdit(baseToOurs);
                                }
                                else
                                {
                                    break;
                                }
                            }
                        }
                        // harmonize the end of the ranges in A and B
                        int oursEndB   = oursEdit.GetEndB();
                        int theirsEndB = theirsEdit.GetEndB();
                        if (oursEdit.GetEndA() < theirsEdit.GetEndA())
                        {
                            oursEndB += theirsEdit.GetEndA() - oursEdit.GetEndA();
                        }
                        else
                        {
                            theirsEndB += oursEdit.GetEndA() - theirsEdit.GetEndA();
                        }
                        // A conflicting region is found. Strip off common lines at
                        // the beginning and the end of the conflicting region
                        // Determine the minimum length of the conflicting areas in OURS
                        // and THEIRS. Also determine how much bigger the conflicting
                        // area in THEIRS is compared to OURS. All that is needed to
                        // limit the search for common areas at the beginning or end
                        // (the common areas cannot be bigger than the smaller
                        // conflicting area). The delta is needed to know whether the
                        // complete conflicting area is common in OURS and THEIRS.
                        int minBSize   = oursEndB - oursBeginB;
                        int BSizeDelta = minBSize - (theirsEndB - theirsBeginB);
                        if (BSizeDelta > 0)
                        {
                            minBSize -= BSizeDelta;
                        }
                        int commonPrefix = 0;
                        while (commonPrefix < minBSize && cmp.Equals(ours, oursBeginB + commonPrefix, theirs
                                                                     , theirsBeginB + commonPrefix))
                        {
                            commonPrefix++;
                        }
                        minBSize -= commonPrefix;
                        int commonSuffix = 0;
                        while (commonSuffix < minBSize && cmp.Equals(ours, oursEndB - commonSuffix - 1, theirs
                                                                     , theirsEndB - commonSuffix - 1))
                        {
                            commonSuffix++;
                        }
                        minBSize -= commonSuffix;
                        // Add the common lines at start of conflict
                        if (commonPrefix > 0)
                        {
                            result.Add(1, oursBeginB, oursBeginB + commonPrefix, MergeChunk.ConflictState.NO_CONFLICT
                                       );
                        }
                        // Add the conflict (Only if there is a conflict left to report)
                        if (minBSize > 0 || BSizeDelta != 0)
                        {
                            result.Add(1, oursBeginB + commonPrefix, oursEndB - commonSuffix, MergeChunk.ConflictState
                                       .FIRST_CONFLICTING_RANGE);
                            result.Add(2, theirsBeginB + commonPrefix, theirsEndB - commonSuffix, MergeChunk.ConflictState
                                       .NEXT_CONFLICTING_RANGE);
                        }
                        // Add the common lines at end of conflict
                        if (commonSuffix > 0)
                        {
                            result.Add(1, oursEndB - commonSuffix, oursEndB, MergeChunk.ConflictState.NO_CONFLICT
                                       );
                        }
                        current    = Math.Max(oursEdit.GetEndA(), theirsEdit.GetEndA());
                        oursEdit   = nextOursEdit;
                        theirsEdit = nextTheirsEdit;
                    }
                }
            }
            // maybe we have a common part behind the last edit: copy it to the
            // result
            if (current < @base.Size())
            {
                result.Add(0, current, @base.Size(), MergeChunk.ConflictState.NO_CONFLICT);
            }
            return(result);
        }
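
Before reporting a conflict, the method trims lines that are identical at the start and end of the overlapping region (the commonPrefix/commonSuffix loops above). A stand-alone sketch of just that trimming step on plain string lists, not the NGit Sequence types:

 using System;
 using System.Collections.Generic;

 static class ConflictTrimSketch
 {
     static void Main()
     {
         IList<string> ours   = new List<string> { "a", "X", "z" };
         IList<string> theirs = new List<string> { "a", "Y", "Y2", "z" };

         int limit = Math.Min(ours.Count, theirs.Count);
         int commonPrefix = 0;
         while (commonPrefix < limit && ours[commonPrefix] == theirs[commonPrefix])
         {
             commonPrefix++;
         }
         int commonSuffix = 0;
         while (commonSuffix < limit - commonPrefix &&
                ours[ours.Count - 1 - commonSuffix] == theirs[theirs.Count - 1 - commonSuffix])
         {
             commonSuffix++;
         }
         // Only the middle ("X" versus "Y", "Y2") would be reported as a conflict.
         Console.WriteLine("common prefix {0}, common suffix {1}", commonPrefix, commonSuffix); // 1, 1
     }
 }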
示例#49
0
 /// <exception cref="System.IO.IOException"/>
 internal virtual void GenCppCode(FileWriter hh, FileWriter cc, AList <string> options
                                  )
 {
     ((JRecord.CppRecord)GetCppType()).GenCode(hh, cc, options);
 }
示例#50
0
        /// <param name="f"></param>
        /// <param name="fh"></param>
        /// <exception cref="System.IO.IOException">System.IO.IOException</exception>
        /// <exception cref="NGit.Api.Errors.PatchApplyException">NGit.Api.Errors.PatchApplyException
        ///     </exception>
        private void Apply(FilePath f, FileHeader fh)
        {
            RawText        rt       = new RawText(f);
            IList <string> oldLines = new AList <string>(rt.Size());

            for (int i = 0; i < rt.Size(); i++)
            {
                oldLines.AddItem(rt.GetString(i));
            }
            IList <string> newLines = new AList <string>(oldLines);

            foreach (HunkHeader hh in fh.GetHunks())
            {
                StringBuilder hunk = new StringBuilder();
                for (int j = hh.GetStartOffset(); j < hh.GetEndOffset(); j++)
                {
                    hunk.Append((char)hh.GetBuffer()[j]);
                }
                RawText        hrt       = new RawText(Sharpen.Runtime.GetBytesForString(hunk.ToString()));
                IList <string> hunkLines = new AList <string>(hrt.Size());
                for (int i_1 = 0; i_1 < hrt.Size(); i_1++)
                {
                    hunkLines.AddItem(hrt.GetString(i_1));
                }
                int pos = 0;
                for (int j_1 = 1; j_1 < hunkLines.Count; j_1++)
                {
                    string hunkLine = hunkLines[j_1];
                    switch (hunkLine[0])
                    {
                    case ' ':
                    {
                        if (!newLines[hh.GetNewStartLine() - 1 + pos].Equals(Sharpen.Runtime.Substring(hunkLine
                                                                                                       , 1)))
                        {
                            throw new PatchApplyException(MessageFormat.Format(JGitText.Get().patchApplyException
                                                                               , hh));
                        }
                        pos++;
                        break;
                    }

                    case '-':
                    {
                        if (!newLines[hh.GetNewStartLine() - 1 + pos].Equals(Sharpen.Runtime.Substring(hunkLine
                                                                                                       , 1)))
                        {
                            throw new PatchApplyException(MessageFormat.Format(JGitText.Get().patchApplyException
                                                                               , hh));
                        }
                        newLines.Remove(hh.GetNewStartLine() - 1 + pos);
                        break;
                    }

                    case '+':
                    {
                        newLines.Add(hh.GetNewStartLine() - 1 + pos, Sharpen.Runtime.Substring(hunkLine,
                                                                                               1));
                        pos++;
                        break;
                    }
                    }
                }
            }
            if (!IsNoNewlineAtEndOfFile(fh))
            {
                newLines.AddItem(string.Empty);
            }
            if (!rt.IsMissingNewlineAtEnd())
            {
                oldLines.AddItem(string.Empty);
            }
            if (!IsChanged(oldLines, newLines))
            {
                return;
            }
            // don't touch the file
            StringBuilder sb  = new StringBuilder();
            string        eol = rt.Size() == 0 || (rt.Size() == 1 && rt.IsMissingNewlineAtEnd()) ? "\n"
                                 : rt.GetLineDelimiter();

            foreach (string l in newLines)
            {
                sb.Append(l);
                if (eol != null)
                {
                    sb.Append(eol);
                }
            }
            Sharpen.Runtime.DeleteCharAt(sb, sb.Length - 1);
            FileWriter fw = new FileWriter(f);

            fw.Write(sb.ToString());
            fw.Close();
        }
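
The hunk loop above dispatches on the first character of each patch line: a space must match the current content, '-' removes the line after checking it, and '+' inserts new text. A hedged, self-contained sketch of that dispatch on plain lists (ApplyHunk is a hypothetical helper, not part of the NGit API):

 using System;
 using System.Collections.Generic;

 static class HunkApplySketch
 {
     // Applies one hunk, given as patch lines (" context", "-removed", "+added"),
     // starting at zero-based index start in lines. Context and removed lines
     // are verified against the current content before being consumed.
     static void ApplyHunk(IList<string> lines, int start, IEnumerable<string> hunk)
     {
         int pos = start;
         foreach (string h in hunk)
         {
             string text = h.Substring(1);
             switch (h[0])
             {
             case ' ':
                 if (lines[pos] != text) { throw new InvalidOperationException("context mismatch"); }
                 pos++;
                 break;
             case '-':
                 if (lines[pos] != text) { throw new InvalidOperationException("delete mismatch"); }
                 lines.RemoveAt(pos);
                 break;
             case '+':
                 lines.Insert(pos, text);
                 pos++;
                 break;
             }
         }
     }

     static void Main()
     {
         IList<string> lines = new List<string> { "one", "two", "three" };
         ApplyHunk(lines, 0, new[] { " one", "-two", "+TWO", " three" });
         Console.WriteLine(string.Join("\n", lines)); // one, TWO, three
     }
 }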
示例#51
0
 public DictionaryEnumCallback(AList <Variant> items)
 {
     mItems = items;
 }
示例#52
0
 /// <exception cref="System.IO.IOException"/>
 internal virtual void GenJavaCode(string destDir, AList <string> options)
 {
     ((JRecord.JavaRecord)GetJavaType()).GenCode(destDir, options);
 }
        /// <exception cref="Com.Drew.Imaging.Png.PngProcessingException"/>
        /// <exception cref="System.IO.IOException"/>
        private static void ProcessChunk([NotNull] Com.Drew.Metadata.Metadata metadata, [NotNull] PngChunk chunk)
        {
            PngChunkType chunkType = chunk.GetChunkType();

            sbyte[] bytes = chunk.GetBytes();
            if (chunkType.Equals(PngChunkType.Ihdr))
            {
                PngHeader    header    = new PngHeader(bytes);
                PngDirectory directory = new PngDirectory(PngChunkType.Ihdr);
                directory.SetInt(PngDirectory.TagImageWidth, header.GetImageWidth());
                directory.SetInt(PngDirectory.TagImageHeight, header.GetImageHeight());
                directory.SetInt(PngDirectory.TagBitsPerSample, header.GetBitsPerSample());
                directory.SetInt(PngDirectory.TagColorType, header.GetColorType().GetNumericValue());
                directory.SetInt(PngDirectory.TagCompressionType, header.GetCompressionType());
                directory.SetInt(PngDirectory.TagFilterMethod, header.GetFilterMethod());
                directory.SetInt(PngDirectory.TagInterlaceMethod, header.GetInterlaceMethod());
                metadata.AddDirectory(directory);
            }
            else
            {
                if (chunkType.Equals(PngChunkType.Plte))
                {
                    PngDirectory directory = new PngDirectory(PngChunkType.Plte);
                    directory.SetInt(PngDirectory.TagPaletteSize, bytes.Length / 3);
                    metadata.AddDirectory(directory);
                }
                else
                {
                    if (chunkType.Equals(PngChunkType.tRNS))
                    {
                        PngDirectory directory = new PngDirectory(PngChunkType.tRNS);
                        directory.SetInt(PngDirectory.TagPaletteHasTransparency, 1);
                        metadata.AddDirectory(directory);
                    }
                    else
                    {
                        if (chunkType.Equals(PngChunkType.sRGB))
                        {
                            int          srgbRenderingIntent = new SequentialByteArrayReader(bytes).GetInt8();
                            PngDirectory directory           = new PngDirectory(PngChunkType.sRGB);
                            directory.SetInt(PngDirectory.TagSrgbRenderingIntent, srgbRenderingIntent);
                            metadata.AddDirectory(directory);
                        }
                        else
                        {
                            if (chunkType.Equals(PngChunkType.cHRM))
                            {
                                PngChromaticities          chromaticities = new PngChromaticities(bytes);
                                PngChromaticitiesDirectory directory      = new PngChromaticitiesDirectory();
                                directory.SetInt(PngChromaticitiesDirectory.TagWhitePointX, chromaticities.GetWhitePointX());
                                directory.SetInt(PngChromaticitiesDirectory.TagWhitePointX, chromaticities.GetWhitePointX());
                                directory.SetInt(PngChromaticitiesDirectory.TagRedX, chromaticities.GetRedX());
                                directory.SetInt(PngChromaticitiesDirectory.TagRedY, chromaticities.GetRedY());
                                directory.SetInt(PngChromaticitiesDirectory.TagGreenX, chromaticities.GetGreenX());
                                directory.SetInt(PngChromaticitiesDirectory.TagGreenY, chromaticities.GetGreenY());
                                directory.SetInt(PngChromaticitiesDirectory.TagBlueX, chromaticities.GetBlueX());
                                directory.SetInt(PngChromaticitiesDirectory.TagBlueY, chromaticities.GetBlueY());
                                metadata.AddDirectory(directory);
                            }
                            else
                            {
                                if (chunkType.Equals(PngChunkType.gAMA))
                                {
                                    int          gammaInt  = new SequentialByteArrayReader(bytes).GetInt32();
                                    PngDirectory directory = new PngDirectory(PngChunkType.gAMA);
                                    directory.SetDouble(PngDirectory.TagGamma, gammaInt / 100000.0);
                                    metadata.AddDirectory(directory);
                                }
                                else
                                {
                                    if (chunkType.Equals(PngChunkType.iCCP))
                                    {
                                        SequentialReader reader      = new SequentialByteArrayReader(bytes);
                                        string           profileName = reader.GetNullTerminatedString(79);
                                        PngDirectory     directory   = new PngDirectory(PngChunkType.iCCP);
                                        directory.SetString(PngDirectory.TagIccProfileName, profileName);
                                        sbyte compressionMethod = reader.GetInt8();
                                        if (compressionMethod == 0)
                                        {
                                            // Only compression method allowed by the spec is zero: deflate
                                            // This assumes 1-byte-per-char, which it is by spec.
                                            int                 bytesLeft         = bytes.Length - profileName.Length - 2;
                                            sbyte[]             compressedProfile = reader.GetBytes(bytesLeft);
                                            InflaterInputStream inflateStream     = new InflaterInputStream(new ByteArrayInputStream(compressedProfile));
                                            new IccReader().Extract(new RandomAccessStreamReader(inflateStream), metadata);
                                            inflateStream.Close();
                                        }
                                        metadata.AddDirectory(directory);
                                    }
                                    else
                                    {
                                        if (chunkType.Equals(PngChunkType.bKGD))
                                        {
                                            PngDirectory directory = new PngDirectory(PngChunkType.bKGD);
                                            directory.SetByteArray(PngDirectory.TagBackgroundColor, bytes);
                                            metadata.AddDirectory(directory);
                                        }
                                        else
                                        {
                                            if (chunkType.Equals(PngChunkType.tEXt))
                                            {
                                                SequentialReader reader        = new SequentialByteArrayReader(bytes);
                                                string           keyword       = reader.GetNullTerminatedString(79);
                                                int    bytesLeft               = bytes.Length - keyword.Length - 1;
                                                string value                   = reader.GetNullTerminatedString(bytesLeft);
                                                IList <KeyValuePair> textPairs = new AList <KeyValuePair>();
                                                textPairs.Add(new KeyValuePair(keyword, value));
                                                PngDirectory directory = new PngDirectory(PngChunkType.iTXt);
                                                directory.SetObject(PngDirectory.TagTextualData, textPairs);
                                                metadata.AddDirectory(directory);
                                            }
                                            else
                                            {
                                                if (chunkType.Equals(PngChunkType.iTXt))
                                                {
                                                    SequentialReader reader            = new SequentialByteArrayReader(bytes);
                                                    string           keyword           = reader.GetNullTerminatedString(79);
                                                    sbyte            compressionFlag   = reader.GetInt8();
                                                    sbyte            compressionMethod = reader.GetInt8();
                                                    string           languageTag       = reader.GetNullTerminatedString(bytes.Length);
                                                    string           translatedKeyword = reader.GetNullTerminatedString(bytes.Length);
                                                    int    bytesLeft = bytes.Length - keyword.Length - 1 - 1 - 1 - languageTag.Length - 1 - translatedKeyword.Length - 1;
                                                    string text      = null;
                                                    if (compressionFlag == 0)
                                                    {
                                                        text = reader.GetNullTerminatedString(bytesLeft);
                                                    }
                                                    else
                                                    {
                                                        if (compressionFlag == 1)
                                                        {
                                                            if (compressionMethod == 0)
                                                            {
                                                                text = StringUtil.FromStream(new InflaterInputStream(new ByteArrayInputStream(bytes, bytes.Length - bytesLeft, bytesLeft)));
                                                            }
                                                            else
                                                            {
                                                                PngDirectory directory = new PngDirectory(PngChunkType.iTXt);
                                                                directory.AddError("Invalid compression method value");
                                                                metadata.AddDirectory(directory);
                                                            }
                                                        }
                                                        else
                                                        {
                                                            PngDirectory directory = new PngDirectory(PngChunkType.iTXt);
                                                            directory.AddError("Invalid compression flag value");
                                                            metadata.AddDirectory(directory);
                                                        }
                                                    }
                                                    if (text != null)
                                                    {
                                                        if (keyword.Equals("XML:com.adobe.xmp"))
                                                        {
                                                            // NOTE in testing images, the XMP has parsed successfully, but we are not extracting tags from it as necessary
                                                            new XmpReader().Extract(text, metadata);
                                                        }
                                                        else
                                                        {
                                                            IList <KeyValuePair> textPairs = new AList <KeyValuePair>();
                                                            textPairs.Add(new KeyValuePair(keyword, text));
                                                            PngDirectory directory = new PngDirectory(PngChunkType.iTXt);
                                                            directory.SetObject(PngDirectory.TagTextualData, textPairs);
                                                            metadata.AddDirectory(directory);
                                                        }
                                                    }
                                                }
                                                else
                                                {
                                                    if (chunkType.Equals(PngChunkType.tIME))
                                                    {
                                                        SequentialByteArrayReader reader = new SequentialByteArrayReader(bytes);
                                                        int year   = reader.GetUInt16();
                                                        int month  = reader.GetUInt8() - 1;
                                                        int day    = reader.GetUInt8();
                                                        int hour   = reader.GetUInt8();
                                                        int minute = reader.GetUInt8();
                                                        int second = reader.GetUInt8();
                                                        Sharpen.Calendar calendar = Sharpen.Calendar.GetInstance(Sharpen.Extensions.GetTimeZone("UTC"));
                                                        //noinspection MagicConstant
                                                        calendar.Set(year, month, day, hour, minute, second);
                                                        PngDirectory directory = new PngDirectory(PngChunkType.tIME);
                                                        directory.SetDate(PngDirectory.TagLastModificationTime, calendar.GetTime());
                                                        metadata.AddDirectory(directory);
                                                    }
                                                    else
                                                    {
                                                        if (chunkType.Equals(PngChunkType.pHYs))
                                                        {
                                                            SequentialByteArrayReader reader = new SequentialByteArrayReader(bytes);
                                                            int          pixelsPerUnitX      = reader.GetInt32();
                                                            int          pixelsPerUnitY      = reader.GetInt32();
                                                            sbyte        unitSpecifier       = reader.GetInt8();
                                                            PngDirectory directory           = new PngDirectory(PngChunkType.pHYs);
                                                            directory.SetInt(PngDirectory.TagPixelsPerUnitX, pixelsPerUnitX);
                                                            directory.SetInt(PngDirectory.TagPixelsPerUnitY, pixelsPerUnitY);
                                                            directory.SetInt(PngDirectory.TagUnitSpecifier, unitSpecifier);
                                                            metadata.AddDirectory(directory);
                                                        }
                                                        else
                                                        {
                                                            if (chunkType.Equals(PngChunkType.sBIT))
                                                            {
                                                                PngDirectory directory = new PngDirectory(PngChunkType.sBIT);
                                                                directory.SetByteArray(PngDirectory.TagSignificantBits, bytes);
                                                                metadata.AddDirectory(directory);
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
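
Most of these handlers decode a small fixed big-endian layout from the chunk payload. As a self-contained illustration, here is the 7-byte tIME payload (a 2-byte year followed by month, day, hour, minute, second) decoded with plain BCL calls instead of the library's SequentialByteArrayReader; the helper name is hypothetical:

 using System;

 static class PngTimeChunkSketch
 {
     // Decodes the 7-byte PNG tIME payload: a big-endian 2-byte year followed
     // by month (1-12), day, hour, minute and second, one byte each.
     static DateTime DecodeTimeChunk(byte[] payload)
     {
         if (payload.Length != 7)
         {
             throw new ArgumentException("tIME payload must be exactly 7 bytes");
         }
         int year = (payload[0] << 8) | payload[1];
         return new DateTime(year, payload[2], payload[3],
                             payload[4], payload[5], payload[6], DateTimeKind.Utc);
     }

     static void Main()
     {
         byte[] payload = { 0x07, 0xE0, 12, 31, 23, 59, 58 }; // 2016-12-31 23:59:58 UTC
         Console.WriteLine(DecodeTimeChunk(payload).ToString("u"));
     }
 }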
示例#54
0
 public ArrayListBackedIterator(AList <X> data)
 {
     this.data = data;
     this.iter = this.data.GetEnumerator();
 }
示例#55
0
        /// <exception cref="System.Exception"/>
        public override int Run(string[] args)
        {
            // -directlyAccessNodeLabelStore is an additional option for node label
            // access, so just check whether this option was specified and remove it
            IList <string> argsList = new AList <string>();

            for (int i = 0; i < args.Length; i++)
            {
                if (args[i].Equals("-directlyAccessNodeLabelStore"))
                {
                    directlyAccessNodeLabelStore = true;
                }
                else
                {
                    argsList.AddItem(args[i]);
                }
            }
            args = Sharpen.Collections.ToArray(argsList, new string[0]);
            YarnConfiguration yarnConf = GetConf() == null ? new YarnConfiguration() : new YarnConfiguration
                                             (GetConf());
            bool isHAEnabled = yarnConf.GetBoolean(YarnConfiguration.RmHaEnabled, YarnConfiguration
                                                   .DefaultRmHaEnabled);

            if (args.Length < 1)
            {
                PrintUsage(string.Empty, isHAEnabled);
                return(-1);
            }
            int    exitCode = -1;
            int    i_1      = 0;
            string cmd      = args[i_1++];

            exitCode = 0;
            if ("-help".Equals(cmd))
            {
                if (i_1 < args.Length)
                {
                    PrintUsage(args[i_1], isHAEnabled);
                }
                else
                {
                    PrintHelp(string.Empty, isHAEnabled);
                }
                return(exitCode);
            }
            if (Usage.Contains(cmd))
            {
                if (isHAEnabled)
                {
                    return(base.Run(args));
                }
                System.Console.Out.WriteLine("Cannot run " + cmd + " when ResourceManager HA is not enabled"
                                             );
                return(-1);
            }
            //
            // verify that we have enough command line parameters
            //
            if ("-refreshAdminAcls".Equals(cmd) || "-refreshQueues".Equals(cmd) || "-refreshNodes"
                .Equals(cmd) || "-refreshServiceAcl".Equals(cmd) || "-refreshUserToGroupsMappings"
                .Equals(cmd) || "-refreshSuperUserGroupsConfiguration".Equals(cmd))
            {
                if (args.Length != 1)
                {
                    PrintUsage(cmd, isHAEnabled);
                    return(exitCode);
                }
            }
            try
            {
                if ("-refreshQueues".Equals(cmd))
                {
                    exitCode = RefreshQueues();
                }
                else
                {
                    if ("-refreshNodes".Equals(cmd))
                    {
                        exitCode = RefreshNodes();
                    }
                    else
                    {
                        if ("-refreshUserToGroupsMappings".Equals(cmd))
                        {
                            exitCode = RefreshUserToGroupsMappings();
                        }
                        else
                        {
                            if ("-refreshSuperUserGroupsConfiguration".Equals(cmd))
                            {
                                exitCode = RefreshSuperUserGroupsConfiguration();
                            }
                            else
                            {
                                if ("-refreshAdminAcls".Equals(cmd))
                                {
                                    exitCode = RefreshAdminAcls();
                                }
                                else
                                {
                                    if ("-refreshServiceAcl".Equals(cmd))
                                    {
                                        exitCode = RefreshServiceAcls();
                                    }
                                    else
                                    {
                                        if ("-getGroups".Equals(cmd))
                                        {
                                            string[] usernames = Arrays.CopyOfRange(args, i_1, args.Length);
                                            exitCode = GetGroups(usernames);
                                        }
                                        else
                                        {
                                            if ("-addToClusterNodeLabels".Equals(cmd))
                                            {
                                                if (i_1 >= args.Length)
                                                {
                                                    System.Console.Error.WriteLine(NoLabelErrMsg);
                                                    exitCode = -1;
                                                }
                                                else
                                                {
                                                    exitCode = AddToClusterNodeLabels(args[i_1]);
                                                }
                                            }
                                            else
                                            {
                                                if ("-removeFromClusterNodeLabels".Equals(cmd))
                                                {
                                                    if (i_1 >= args.Length)
                                                    {
                                                        System.Console.Error.WriteLine(NoLabelErrMsg);
                                                        exitCode = -1;
                                                    }
                                                    else
                                                    {
                                                        exitCode = RemoveFromClusterNodeLabels(args[i_1]);
                                                    }
                                                }
                                                else
                                                {
                                                    if ("-replaceLabelsOnNode".Equals(cmd))
                                                    {
                                                        if (i_1 >= args.Length)
                                                        {
                                                            System.Console.Error.WriteLine(NoMappingErrMsg);
                                                            exitCode = -1;
                                                        }
                                                        else
                                                        {
                                                            exitCode = ReplaceLabelsOnNodes(args[i_1]);
                                                        }
                                                    }
                                                    else
                                                    {
                                                        exitCode = -1;
                                                        System.Console.Error.WriteLine(Sharpen.Runtime.Substring(cmd, 1) + ": Unknown command"
                                                                                       );
                                                        PrintUsage(string.Empty, isHAEnabled);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            catch (ArgumentException arge)
            {
                exitCode = -1;
                System.Console.Error.WriteLine(Sharpen.Runtime.Substring(cmd, 1) + ": " + arge.GetLocalizedMessage
                                                   ());
                PrintUsage(cmd, isHAEnabled);
            }
            catch (RemoteException e)
            {
                //
                // This is an error returned by the hadoop server. Print
                // out the first line of the error message, ignore the stack trace.
                exitCode = -1;
                try
                {
                    string[] content;
                    content = e.GetLocalizedMessage().Split("\n");
                    System.Console.Error.WriteLine(Sharpen.Runtime.Substring(cmd, 1) + ": " + content
                                                   [0]);
                }
                catch (Exception ex)
                {
                    System.Console.Error.WriteLine(Sharpen.Runtime.Substring(cmd, 1) + ": " + ex.GetLocalizedMessage
                                                       ());
                }
            }
            catch (Exception e)
            {
                exitCode = -1;
                System.Console.Error.WriteLine(Sharpen.Runtime.Substring(cmd, 1) + ": " + e.GetLocalizedMessage
                                                   ());
            }
            if (null != localNodeLabelsManager)
            {
                localNodeLabelsManager.Stop();
            }
            return(exitCode);
        }
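
The option handling at the top of Run scans the argument array, latches -directlyAccessNodeLabelStore, and rebuilds the array without it. A minimal stand-alone sketch of that filtering step (StripFlag is a hypothetical helper, not part of the YARN CLI):

 using System;
 using System.Collections.Generic;

 static class ArgFilterSketch
 {
     // Removes every occurrence of flag from args and reports whether it was present.
     static string[] StripFlag(string[] args, string flag, out bool present)
     {
         present = false;
         var kept = new List<string>();
         foreach (string a in args)
         {
             if (a == flag) { present = true; } else { kept.Add(a); }
         }
         return kept.ToArray();
     }

     static void Main()
     {
         bool direct;
         string[] rest = StripFlag(
             new[] { "-refreshNodes", "-directlyAccessNodeLabelStore" },
             "-directlyAccessNodeLabelStore", out direct);
         Console.WriteLine("{0} argument(s) left, flag seen: {1}", rest.Length, direct); // 1, True
     }
 }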
示例#56
0
        /// <summary>
        /// Verify a DN remains in DECOMMISSION_INPROGRESS state if it is marked
        /// as dead before decommission has completed.
        /// </summary>
        /// <remarks>
        /// Verify a DN remains in DECOMMISSION_INPROGRESS state if it is marked
        /// as dead before decommission has completed. That will allow the DN to resume
        /// the replication process after it rejoins the cluster.
        /// </remarks>
        /// <exception cref="System.Exception"/>
        public virtual void TestDecommissionStatusAfterDNRestart()
        {
            DistributedFileSystem fileSys = (DistributedFileSystem)cluster.GetFileSystem();
            // Create a file with one block. That block has one replica.
            Path f = new Path("decommission.dat");

            DFSTestUtil.CreateFile(fileSys, f, fileSize, fileSize, fileSize, (short)1, seed);
            // Find the DN that owns the only replica.
            RemoteIterator <LocatedFileStatus> fileList = fileSys.ListLocatedStatus(f);

            BlockLocation[] blockLocations = fileList.Next().GetBlockLocations();
            string          dnName         = blockLocations[0].GetNames()[0];
            // Decommission the DN.
            FSNamesystem    fsn = cluster.GetNamesystem();
            DatanodeManager dm  = fsn.GetBlockManager().GetDatanodeManager();

            DecommissionNode(fsn, localFileSys, dnName);
            dm.RefreshNodes(conf);
            // Stop the DN when decommission is in progress.
            // Given that DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY is set to 1 and the
            // size of the block, the decommission will take much longer than the test
            // timeout value to complete. So when stopDataNode is called, decommission
            // should be in progress.
            MiniDFSCluster.DataNodeProperties dataNodeProperties = cluster.StopDataNode(dnName
                                                                                        );
            IList <DatanodeDescriptor> dead = new AList <DatanodeDescriptor>();

            while (true)
            {
                dm.FetchDatanodes(null, dead, false);
                if (dead.Count == 1)
                {
                    break;
                }
                Sharpen.Thread.Sleep(1000);
            }
            // Force removal of the dead node's blocks.
            BlockManagerTestUtil.CheckHeartbeat(fsn.GetBlockManager());
            // Force DatanodeManager to check decommission state.
            BlockManagerTestUtil.RecheckDecommissionState(dm);
            // Verify that the DN remains in DECOMMISSION_INPROGRESS state.
            NUnit.Framework.Assert.IsTrue("the node should be DECOMMISSION_IN_PROGRESSS", dead
                                          [0].IsDecommissionInProgress());
            // Check DatanodeManager#getDecommissionNodes, make sure it returns
            // the node as decommissioning, even if it's dead
            IList <DatanodeDescriptor> decomlist = dm.GetDecommissioningNodes();

            NUnit.Framework.Assert.IsTrue("The node should be be decommissioning", decomlist.
                                          Count == 1);
            // Delete the under-replicated file, which should let the
            // DECOMMISSION_IN_PROGRESS node become DECOMMISSIONED
            CleanupFile(fileSys, f);
            BlockManagerTestUtil.RecheckDecommissionState(dm);
            NUnit.Framework.Assert.IsTrue("the node should be decommissioned", dead[0].IsDecommissioned
                                              ());
            // Add the node back
            cluster.RestartDataNode(dataNodeProperties, true);
            cluster.WaitActive();
            // Call refreshNodes on FSNamesystem with an empty exclude file.
            // This will remove the datanodes from the decommissioning list and
            // make them available again.
            WriteConfigFile(localFileSys, excludeFile, null);
            dm.RefreshNodes(conf);
        }
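The while (true) loop above polls until the stopped datanode is reported as dead, but it has no upper bound. The sketch below is a hypothetical bounded variant, not part of the Hadoop test; it assumes the same DatanodeManager.FetchDatanodes call used above.

        // Hypothetical bounded version of the wait loop above (not in the original
        // test): poll FetchDatanodes once per second until the expected number of
        // dead nodes is reported or the timeout elapses.
        private static bool WaitForDeadNodes(DatanodeManager dm, IList<DatanodeDescriptor> dead,
                                             int expected, int timeoutMs)
        {
            for (int waited = 0; waited < timeoutMs; waited += 1000)
            {
                dead.Clear();
                dm.FetchDatanodes(null, dead, false);
                if (dead.Count == expected)
                {
                    return true;
                }
                Sharpen.Thread.Sleep(1000);
            }
            return false;
        }

The test body could then assert on the returned flag instead of looping inline.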
Example #57
0
        public void TestViewCollationRaw()
        {
            IList <object> list1 = new AList <object>();

            list1.AddItem("a");
            IList <object> list2 = new AList <object>();

            list2.AddItem("b");
            IList <object> list3 = new AList <object>();

            list3.AddItem("b");
            list3.AddItem("c");
            IList <object> list4 = new AList <object>();

            list4.AddItem("b");
            list4.AddItem("c");
            list4.AddItem("a");
            IList <object> list5 = new AList <object>();

            list5.AddItem("b");
            list5.AddItem("d");
            IList <object> list6 = new AList <object>();

            list6.AddItem("b");
            list6.AddItem("d");
            list6.AddItem("e");

            // Based on CouchDB's "view_collation.js" test
            IList <object> testKeys = new AList <object>();

            testKeys.AddItem(0);
            testKeys.AddItem(2.5);
            testKeys.AddItem(10);
            testKeys.AddItem(false);
            testKeys.AddItem(null);
            testKeys.AddItem(true);
            testKeys.AddItem(list1);
            testKeys.AddItem(list2);
            testKeys.AddItem(list3);
            testKeys.AddItem(list4);
            testKeys.AddItem(list5);
            testKeys.AddItem(list6);
            testKeys.AddItem(" ");
            testKeys.AddItem("A");
            testKeys.AddItem("B");
            testKeys.AddItem("_");
            testKeys.AddItem("a");
            testKeys.AddItem("aa");
            testKeys.AddItem("b");
            testKeys.AddItem("ba");
            testKeys.AddItem("bb");
            testKeys.AddItem("~");

            int i = 0;

            foreach (object key in testKeys)
            {
                IDictionary <string, object> docProperties = new Dictionary <string, object>();
                docProperties.Put("_id", Sharpen.Extensions.ToString(i++));
                docProperties["name"] = key;
                PutDoc(database, docProperties);
            }

            View view = database.GetView("default/names");

            view.SetMapReduce((document, emitter) =>
                              emitter(document["name"], null), null, "1.0");

            view.Collation = ViewCollation.Raw;

            QueryOptions options = new QueryOptions();

            IList <QueryRow> rows = view.QueryWithOptions(options).ToList();

            i = 0;
            foreach (QueryRow row in rows)
            {
                Assert.AreEqual(testKeys[i++], row.Key);
            }
            database.Close();
        }
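PutDoc is a test helper referenced above but not shown in this snippet. A minimal stand-in, assuming the Couchbase Lite 1.x .NET document API (Database.GetDocument / Document.PutProperties), might look like the sketch below; the real helper in the test suite may be implemented differently.

        // Hypothetical stand-in for the PutDoc helper used above, assuming the
        // Couchbase Lite 1.x .NET API. The "_id" entry in the properties must match
        // the document id, which it does for the documents created in this test.
        internal static void PutDoc(Database database, IDictionary<string, object> properties)
        {
            var doc = database.GetDocument((string)properties["_id"]);
            doc.PutProperties(properties);
        }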
Example #58
0
        /// <summary>Construct the merge commit message.</summary>
        /// <remarks>Construct the merge commit message.</remarks>
        /// <param name="refsToMerge">the refs which will be merged</param>
        /// <param name="target">the branch ref which will be merged into</param>
        /// <returns>merge commit message</returns>
        public virtual string Format(IList <Ref> refsToMerge, Ref target)
        {
            StringBuilder sb = new StringBuilder();

            sb.Append("Merge ");
            IList <string> branches       = new AList <string>();
            IList <string> remoteBranches = new AList <string>();
            IList <string> tags           = new AList <string>();
            IList <string> commits        = new AList <string>();
            IList <string> others         = new AList <string>();

            foreach (Ref @ref in refsToMerge)
            {
                if (@ref.GetName().StartsWith(Constants.R_HEADS))
                {
                    branches.AddItem("'" + Repository.ShortenRefName(@ref.GetName()) + "'");
                }
                else
                {
                    if (@ref.GetName().StartsWith(Constants.R_REMOTES))
                    {
                        remoteBranches.AddItem("'" + Repository.ShortenRefName(@ref.GetName()) + "'");
                    }
                    else
                    {
                        if (@ref.GetName().StartsWith(Constants.R_TAGS))
                        {
                            tags.AddItem("'" + Repository.ShortenRefName(@ref.GetName()) + "'");
                        }
                        else
                        {
                            if (@ref.GetName().Equals(@ref.GetObjectId().GetName()))
                            {
                                commits.AddItem("'" + @ref.GetName() + "'");
                            }
                            else
                            {
                                others.AddItem(@ref.GetName());
                            }
                        }
                    }
                }
            }
            IList <string> listings = new AList <string>();

            if (!branches.IsEmpty())
            {
                listings.AddItem(JoinNames(branches, "branch", "branches"));
            }
            if (!remoteBranches.IsEmpty())
            {
                listings.AddItem(JoinNames(remoteBranches, "remote branch", "remote branches"));
            }
            if (!tags.IsEmpty())
            {
                listings.AddItem(JoinNames(tags, "tag", "tags"));
            }
            if (!commits.IsEmpty())
            {
                listings.AddItem(JoinNames(commits, "commit", "commits"));
            }
            if (!others.IsEmpty())
            {
                listings.AddItem(StringUtils.Join(others, ", ", " and "));
            }
            sb.Append(StringUtils.Join(listings, ", "));
            string targetName = target.GetLeaf().GetName();

            if (!targetName.Equals(Constants.R_HEADS + Constants.MASTER))
            {
                string targetShortName = Repository.ShortenRefName(targetName);
                sb.Append(" into " + targetShortName);
            }
            return(sb.ToString());
        }
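Format relies on a JoinNames helper that is not included in this snippet. The sketch below shows what it presumably does, reusing the StringUtils.Join overload already called above; treat it as an illustration rather than the exact NGit implementation. With it, merging refs/heads/a and refs/heads/b into refs/heads/master yields "Merge branches 'a' and 'b'" (no " into ..." suffix, since the target is master).

        // Sketch of the JoinNames helper referenced above (not shown in this
        // snippet): pick the singular or plural label and join the already-quoted
        // names with commas and a final " and ".
        private static string JoinNames(IList<string> names, string singular, string plural)
        {
            if (names.Count == 1)
            {
                return singular + " " + names[0];
            }
            return plural + " " + StringUtils.Join(names, ", ", " and ");
        }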
Example #59
0
        /// <summary>
        /// <inheritDoc/>
        ///
        /// </summary>
        /// <exception cref="System.IO.IOException"/>
        public override IList <InputSplit> GetSplits(JobContext job)
        {
            ResultSet results   = null;
            Statement statement = null;

            try
            {
                statement = connection.CreateStatement();
                results   = statement.ExecuteQuery(GetCountQuery());
                results.Next();
                long count     = results.GetLong(1);
                int  chunks    = job.GetConfiguration().GetInt(MRJobConfig.NumMaps, 1);
                long chunkSize = (count / chunks);
                results.Close();
                statement.Close();
                IList <InputSplit> splits = new AList <InputSplit>();
                // Split the rows into n chunks and adjust the last chunk
                // accordingly
                for (int i = 0; i < chunks; i++)
                {
                    DBInputFormat.DBInputSplit split;
                    if ((i + 1) == chunks)
                    {
                        split = new DBInputFormat.DBInputSplit(i * chunkSize, count);
                    }
                    else
                    {
                        split = new DBInputFormat.DBInputSplit(i * chunkSize, (i * chunkSize) + chunkSize
                                                               );
                    }
                    splits.AddItem(split);
                }
                connection.Commit();
                return(splits);
            }
            catch (SQLException e)
            {
                throw new IOException("Got SQLException", e);
            }
            finally
            {
                try
                {
                    if (results != null)
                    {
                        results.Close();
                    }
                }
                catch (SQLException)
                {
                }
                try
                {
                    if (statement != null)
                    {
                        statement.Close();
                    }
                }
                catch (SQLException)
                {
                }
                CloseConnection();
            }
        }
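To make the chunking arithmetic concrete: with count = 10 and chunks = 3, chunkSize is 3 and the splits cover the row ranges [0, 3), [3, 6) and [6, 10), so the last split absorbs the remainder. The standalone sketch below (not part of DBInputFormat, assuming the usual System and System.Collections.Generic usings) reproduces just that boundary calculation.

        // Standalone illustration of the split-boundary arithmetic used above;
        // not part of DBInputFormat. For count = 10 and chunks = 3 it returns
        // (0, 3), (3, 6), (6, 10).
        private static IList<Tuple<long, long>> ChunkRows(long count, int chunks)
        {
            var ranges = new List<Tuple<long, long>>();
            long chunkSize = count / chunks;
            for (int i = 0; i < chunks; i++)
            {
                long start = i * chunkSize;
                long end = (i + 1) == chunks ? count : start + chunkSize;
                ranges.Add(Tuple.Create(start, end));
            }
            return ranges;
        }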
Example #60
0
 /// <param name="testCommands">the testCommands to set</param>
 public virtual void SetTestCommands(AList <CLICommand> testCommands)
 {
     this.testCommands = testCommands;
 }