/// <summary>
 /// Deletes entities (users or groups) from the rule with the specified id
 /// </summary>
 /// <param name="rule">Rule whose entities are removed; must have an Id</param>
 /// <param name="entities">Entities (users or groups) to remove</param>
 public static IObservable<Rule> DeleteEntities(Rule rule, IList<IEntity> entities)
 {
     if (rule.Id == null) throw new InvalidDataException("The provided rule must have an Id");
     if (entities == null || entities.IsEmpty())
         return Observable.Defer(() => Observable.Return(rule));
     return RestEndpointFactory
         .Create<IRulesEndpoint>(SessionManager.Instance.CurrentLoggedUser)
         .DeleteEntities(rule.Id, entities.IsEmpty()
             ? "0" // defensive only: entities is guaranteed non-empty here by the early return above
             : entities.ToString(e => e.Id)).ToObservable()
         .SubscribeOn(ThreadPoolScheduler.Instance)
         .InterpretingErrors();
 }
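Every example in this collection leans on an IsEmpty() extension for IList<T>. The actual extension is not shown on this page; a minimal sketch, assuming it simply checks Count (the companion IsNotEmpty() used further below is sketched the same way), could look like this:

using System.Collections.Generic;

public static class CollectionExtensions
{
    // Hypothetical stand-in for the IsEmpty()/IsNotEmpty() extensions used throughout these examples.
    public static bool IsEmpty<T>(this IList<T> list)
    {
        return list.Count == 0;
    }

    public static bool IsNotEmpty<T>(this IList<T> list)
    {
        return list.Count != 0;
    }
}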
        public SignatureHelpItems(
            IList<SignatureHelpItem> items,
            TextSpan applicableSpan,
            int argumentIndex,
            int argumentCount,
            string argumentName,
            int? selectedItem = null)
        {
            Contract.ThrowIfNull(items);
            Contract.ThrowIfTrue(items.IsEmpty());
            Contract.ThrowIfTrue(selectedItem.HasValue && selectedItem.Value > items.Count);

            if (argumentIndex < 0)
            {
                throw new ArgumentException($"{nameof(argumentIndex)} < 0", nameof(argumentIndex));
            }

            if (argumentCount < argumentIndex)
            {
                throw new ArgumentException($"{nameof(argumentCount)} < {nameof(argumentIndex)}", nameof(argumentIndex));
            }

            this.Items = items;
            this.ApplicableSpan = applicableSpan;
            this.ArgumentIndex = argumentIndex;
            this.ArgumentCount = argumentCount;
            this.SelectedItemIndex = selectedItem;
            this.ArgumentName = argumentName;
        }
Example #3
        public GroupConcat(bool distinct,
                           IList<IExpression> exprList,
                           IExpression orderBy,
                           bool isDesc,
                           IList<IExpression> appendedColumnNames,
                           string separator)
            : base("GROUP_CONCAT", exprList)
        {
            IsDistinct = distinct;
            OrderBy = orderBy;
            IsDesc = isDesc;
            if (appendedColumnNames == null || appendedColumnNames.IsEmpty())
            {
                AppendedColumnNames = new List<IExpression>(0);
            }
            else if (appendedColumnNames is List<IExpression>)
            {
                AppendedColumnNames = appendedColumnNames;
            }
            else
            {
                AppendedColumnNames = new List<IExpression>(
                    appendedColumnNames);
            }

            Separator = separator ?? ",";
        }
Example #4
 public IndexHint(IndexHintAction hintAction,
                  IndexHintType hintType,
                  IndexHintScope hintScope,
                  IList<string> indexList)
 {
     if (hintAction == IndexHintAction.None)
     {
         throw new ArgumentException("index hint hintAction is null");
     }
     if (hintType == IndexHintType.None)
     {
         throw new ArgumentException("index hint hintType is null");
     }
     if (hintScope == IndexHintScope.None)
     {
         throw new ArgumentException("index hint hintScope is null");
     }
     HintAction = hintAction;
     IndexType = hintType;
     HintScope = hintScope;
     if (indexList == null || indexList.IsEmpty())
     {
         IndexList = new List<string>(0);
     }
     else if (indexList is List<string>)
     {
         IndexList = indexList;
     }
     else
     {
         IndexList = new List<string>(indexList);
     }
 }
Example #5
        /// <summary>
        /// Intended to be executed only once
        /// </summary>
        private void InitializeTiles(ISessionFactory factory)
        {
            Random random = new Random();
            int result = 0;
            var nhSession = factory.OpenSession();

            MapTiles = nhSession.QueryOver<MapTile>().List();
            if (MapTiles.IsEmpty())
            {
                MapTiles = new List<MapTile>();
                using (var transaction = nhSession.BeginTransaction())
                {
                    for (int x = 0; x < 8; x++)
                    {
                        for (int y = 0; y < 8; y++)
                        {
                            result = random.Next(0, 10);
                            var tile = new MapTile() {Name = "Wasteland", X = x, Y = y};
                            MapTiles.Add(tile);
                            nhSession.Save(tile);
                        }
                    }

                    transaction.Commit();
                }
            }
        }
 /// <exception cref="System.SqlSyntaxErrorException" />
 public TableReferences(IList<TableReference> list)
 {
     if (list == null || list.IsEmpty())
     {
         throw new SqlSyntaxErrorException("at least one table reference");
     }
     _list = EnsureListType(list);
 }
        public ActionResult CommentColumns(string id, IList<Column> columns)
        {
            if (!columns.IsEmpty())
            {
                foreach (var item in columns)
                {
                    this.DynamicQuery.Provider.DbMetadata.CommentColumn(id, item.Name, item.Description);
                }
            }

            return CloseDialogWithAlert("修改成功!"); // "修改成功!" = "Modification successful!"
        }
 public TableRuleConfig(string name, IList<RuleConfig> rules)
 {
     if (name == null)
     {
         throw new ArgumentException("name is null");
     }
     this.name = name;
     if (rules == null || rules.IsEmpty())
     {
         throw new ArgumentException("no rule is found");
     }
     this.rules = new List<RuleConfig>(rules).AsReadOnly();
 }
        public IndexDefinition(IndexType indexType,
                               IList<IndexColumnName> columns,
                               IList<IndexOption> options)
        {
            IndexType = indexType;
            if (columns == null || columns.IsEmpty())
            {
                throw new ArgumentException("columns is null or empty");
            }

            Columns = columns;
            Options = options == null || options.IsEmpty() ? new List<IndexOption>(0) : options;
        }
 public DalSetStatement(IList<Pair<VariableExpression, IExpression>> assignmentList)
 {
     if (assignmentList == null || assignmentList.IsEmpty())
     {
         AssignmentList = new List<Pair<VariableExpression, IExpression>>(0);
     }
     else if (assignmentList is List<Pair<VariableExpression, IExpression>>)
     {
         AssignmentList = assignmentList;
     }
     else
     {
         AssignmentList = new List<Pair<VariableExpression, IExpression>>(assignmentList);
     }
 }
 public FunctionExpression(string functionName, IList<IExpression> arguments)
 {
     this.functionName = functionName;
     if (arguments == null || arguments.IsEmpty())
     {
         this.arguments = new List<IExpression>(0);
     }
     else if (arguments is List<IExpression>)
     {
         this.arguments = arguments;
     }
     else
     {
         this.arguments = new List<IExpression>(arguments);
     }
 }
        public SignatureHelpItems(
            IList<SignatureHelpItem> items,
            TextSpan applicableSpan,
            int argumentIndex,
            int argumentCount,
            string argumentName,
            int? selectedItem = null)
        {
            Contract.ThrowIfNull(items);
            Contract.ThrowIfTrue(items.IsEmpty());
            Contract.ThrowIfTrue(selectedItem.HasValue && selectedItem.Value >= items.Count);

            if (argumentIndex < 0)
            {
                throw new ArgumentException($"{nameof(argumentIndex)} < 0. {argumentIndex} < 0", nameof(argumentIndex));
            }

            if (argumentCount < argumentIndex)
            {
                throw new ArgumentException($"{nameof(argumentCount)} < {nameof(argumentIndex)}. {argumentCount} < {argumentIndex}", nameof(argumentIndex));
            }

            // Adjust the `selectedItem` index if duplicates are able to be removed.
            var distinctItems = items.Distinct().ToList();
            if (selectedItem.HasValue && items.Count != distinctItems.Count)
            {
                // `selectedItem` index has already been determined to be valid, it now needs to be adjusted to point
                // to the equivalent item in the reduced list to account for duplicates being removed
                // E.g.,
                //   items = {A, A, B, B, C, D}
                //   selectedItem = 4 (index for item C)
                // ergo
                //   distinctItems = {A, B, C, D}
                //   actualItem = C
                //   selectedItem = 2 (index for item C)
                var actualItem = items[selectedItem.Value];
                selectedItem = distinctItems.IndexOf(actualItem);
                Debug.Assert(selectedItem.Value >= 0, "actual item was not part of the final list");
            }

            this.Items = distinctItems;
            this.ApplicableSpan = applicableSpan;
            this.ArgumentIndex = argumentIndex;
            this.ArgumentCount = argumentCount;
            this.SelectedItemIndex = selectedItem;
            this.ArgumentName = argumentName;
        }
Example #13
 public TableRefFactor(Identifier table, string alias, IList<IndexHint> hintList)
     : base(alias)
 {
     this.table = table;
     if (hintList == null || hintList.IsEmpty())
     {
         HintList = new List<IndexHint>(0);
     }
     else if (hintList is List<IndexHint>)
     {
         HintList = hintList;
     }
     else
     {
         HintList = new List<IndexHint>(hintList);
     }
 }
 public DdlDropTableStatement(IList<Identifier> tableNames,
                              bool temp,
                              bool ifExists,
                              DropTableMode dropTableMode)
 {
     if (tableNames == null || tableNames.IsEmpty())
     {
         TableNames = new List<Identifier>(0);
     }
     else
     {
         TableNames = tableNames;
     }
     IsTemp = temp;
     IsIfExists = ifExists;
     Mode = dropTableMode;
 }
Example #15
 public RowExpression(IList<IExpression> rowExprList)
 {
     if (rowExprList == null || rowExprList.IsEmpty())
     {
         RowExprList = new List<IExpression>(0);
     }
     else
     {
         if (rowExprList is List<IExpression>)
         {
             RowExprList = rowExprList;
         }
         else
         {
             RowExprList = new List<IExpression>(rowExprList);
         }
     }
 }
Example #16
        public ShowProfile(IList<ProfileType> types, IExpression forQuery, Limit limit)
        {
            if (types == null || types.IsEmpty())
            {
                ProfileTypes = new List<ProfileType>(0);
            }
            else if (types is List<ProfileType>)
            {
                ProfileTypes = types;
            }
            else
            {
                ProfileTypes = new List<ProfileType>(types);
            }

            ForQuery = forQuery;
            Limit = limit;
        }
Example #17
        /// <summary>
        /// Data query operation.
        /// </summary>
        /// <param name="id">Table name</param>
        /// <param name="conditions">Query condition configuration</param>
        /// <param name="pageIndex">Current page index</param>
        /// <returns>Returns the data query page</returns>
        public ActionResult Index(string id, IList<ConditionInfo> conditions = null, int pageIndex = 1)
        {
            var model = new SearchModel
            {
                Table = this.DynamicQuery.Provider.DbMetadata.GetTable(id)
            };

            if (model.Table != null)
            {
                model.Columns = this.DynamicQuery.Provider.DbMetadata.GetColumns(id);
                model.Search = this.DynamicQuery.Where<SearchInfo>(m => m.TableName, id).Single();
                model.Conditions = this.DynamicQuery.Where<ConditionInfo>(m => m.TableName, id).List();

                var showColumns = model.Search?.GetVisibleColumns();
                var orders = this.DynamicQuery.Where<OrderInfo>(m => m.TableName, id).List().Select(o => (Order)o);

                int totalRecords;
                if (conditions.IsEmpty())
                {
                    model.DataSource = this.DynamicQuery.OrderBy(orders.ToArray()).PagedList(id, pageIndex < 1 ? 1 : pageIndex, SearchObject.DefalutPageSize, out totalRecords, showColumns);
                }
                else
                {
                    var temps = conditions.Select(c => (Condition)c);

                    model.Conditions = conditions.MergeDatas(model.Conditions, (i1, i2) => i1.Column == i2.Column, (i1, i2) =>
                    {
                        i1.Id = i2.Id;
                        i1.TableName = i2.TableName;
                        i1.Op = i2.Op;
                        i1.ValidateRule = i2.ValidateRule;
                        i1.DictionaryKey = i2.DictionaryKey;
                    }) as IList<ConditionInfo>;

                    model.DataSource = this.DynamicQuery.Where(temps).OrderBy(orders.ToArray()).PagedList(id, pageIndex < 1 ? 1 : pageIndex, SearchObject.DefalutPageSize, out totalRecords, showColumns);
                }

                model.TotalRecords = totalRecords;
            }

            this.ViewBag.Model = model;

            return View();
        }
 public DmlCallStatement(Identifier procedure, IList<IExpression> arguments)
 {
     this.procedure = procedure;
     if (arguments == null || arguments.IsEmpty())
     {
         this.arguments = new List<IExpression>(0);
     }
     else
     {
         if (arguments is List<IExpression>)
         {
             this.arguments = arguments;
         }
         else
         {
             this.arguments = new List<IExpression>(arguments);
         }
     }
 }
 /// <param name="_matchModifier">never null</param>
 public MatchExpression(IList<IExpression> columns, IExpression pattern,
                        MatchModifier _matchModifier)
 {
     if (columns == null || columns.IsEmpty())
     {
         Columns = new List<IExpression>(0);
     }
     else
     {
         if (columns is List<IExpression>)
         {
             Columns = columns;
         }
         else
         {
             Columns = new List<IExpression>(columns);
         }
     }
     Pattern = pattern;
     Modifier = _matchModifier;
 }
        public virtual void TestLnrmMatching()
        {
            MultiWordStringMatcher entityMatcher = new MultiWordStringMatcher(MultiWordStringMatcher.MatchType.Lnrm);
            string          entityName           = "Al-Ahram";
            string          context = "the government Al-Ahram newspaper";
            IList <IntPair> offsets = entityMatcher.FindTargetStringOffsets(context, entityName);

            NUnit.Framework.Assert.AreEqual("entityOffsets", "[15 23]", "[" + StringUtils.Join(offsets, ",") + "]");
            context = "the government Al- Ahram newspaper";
            offsets = entityMatcher.FindTargetStringOffsets(context, entityName);
            NUnit.Framework.Assert.AreEqual("entityOffsets", "[15 24]", "[" + StringUtils.Join(offsets, ",") + "]");
            entityName = "Al -Ahram";
            offsets    = entityMatcher.FindTargetStringOffsets(context, entityName);
            NUnit.Framework.Assert.AreEqual("entityOffsets", "[15 24]", "[" + StringUtils.Join(offsets, ",") + "]");
            context = "the government Al-Ahramnewspaper";
            offsets = entityMatcher.FindTargetStringOffsets(context, entityName);
            NUnit.Framework.Assert.IsTrue("entityOffsets", offsets == null || offsets.IsEmpty());
            context = "the government AlAhram newspaper";
            offsets = entityMatcher.FindTargetStringOffsets(context, entityName);
            NUnit.Framework.Assert.AreEqual("entityOffsets", "[15 22]", "[" + StringUtils.Join(offsets, ",") + "]");
            context = "the government alahram newspaper";
            offsets = entityMatcher.FindTargetStringOffsets(context, entityName);
            NUnit.Framework.Assert.AreEqual("entityOffsets", "[15 22]", "[" + StringUtils.Join(offsets, ",") + "]");
        }
Example #21
        /// <summary>
        /// Swaps two elements that live in the same list using a 3-way swap.
        /// </summary>
        /// <typeparam name="T">The underlying type of each element in the list.</typeparam>
        /// <param name="list">The list of items.</param>
        /// <param name="fromIndex">Source element index.</param>
        /// <param name="toIndex">Destination element index.</param>
        /// <exception cref="NullReferenceException">If the list is null.</exception>
        /// <exception cref="IndexOutOfRangeException">If the fromIndex index is a negative number or exceeds the index value of the last element in the list.</exception>
        /// <exception cref="IndexOutOfRangeException">If the toIndex index is a negative number or exceeds the index value of the last element in the list</exception>
        public static void SwapByIndex <T>(this IList <T> list, int fromIndex, int toIndex)
        {
            if (list == null)
            {
                throw new NullReferenceException(nameof(list));
            }

            if (list.IsEmpty())
            {
                throw new InvalidOperationException("Unable to swap elements in an empty list.");
            }

            if (fromIndex < 0 || fromIndex > list.LastElementIndex())
            {
                throw new IndexOutOfRangeException(nameof(fromIndex));
            }

            if (toIndex < 0 || toIndex > list.LastElementIndex())
            {
                throw new IndexOutOfRangeException(nameof(toIndex));
            }

            IListExtensions.SwapInternal(list, fromIndex, toIndex);
        }
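For illustration, a hedged sketch of the 3-way swap that SwapByIndex delegates to (SwapInternal and LastElementIndex are helpers of the surrounding extension class and are assumed here, not taken from this page), followed by a usage note:

using System.Collections.Generic;

public static class ListSwapSketch
{
    // Hypothetical stand-in for IListExtensions.SwapInternal: a classic 3-way swap.
    public static void SwapInternal<T>(IList<T> list, int fromIndex, int toIndex)
    {
        T temp = list[fromIndex];        // 1. stash the source element
        list[fromIndex] = list[toIndex]; // 2. copy the destination element over the source slot
        list[toIndex] = temp;            // 3. write the stashed value into the destination slot
    }
}

// Usage (illustrative):
//   var numbers = new List<int> { 1, 2, 3, 4 };
//   numbers.SwapByIndex(0, numbers.Count - 1);   // numbers is now 4, 2, 3, 1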
Example #22
        public static IList <ExprNode> GetExprNodesLibFunc(
            EsperEPL2GrammarParser.LibFunctionArgsContext ctx,
            IDictionary <ITree, ExprNode> astExprNodeMap)
        {
            if (ctx == null)
            {
                return(Collections.GetEmptyList <ExprNode>());
            }
            IList <EsperEPL2GrammarParser.LibFunctionArgItemContext> args = ctx.libFunctionArgItem();

            if (args == null || args.IsEmpty())
            {
                return(Collections.GetEmptyList <ExprNode>());
            }
            var parameters = new List <ExprNode>(args.Count);

            foreach (var arg in args)
            {
                if (arg.expressionLambdaDecl() != null)
                {
                    var lambdaparams = GetLambdaGoesParams(arg.expressionLambdaDecl());
                    var goes         = new ExprLambdaGoesNode(lambdaparams);
                    var lambdaExpr   =
                        ASTExprHelper.ExprCollectSubNodes(arg.expressionWithNamed(), 0, astExprNodeMap)[0];
                    goes.AddChildNode(lambdaExpr);
                    parameters.Add(goes);
                }
                else
                {
                    var parameter =
                        ASTExprHelper.ExprCollectSubNodes(arg.expressionWithNamed(), 0, astExprNodeMap)[0];
                    parameters.Add(parameter);
                }
            }
            return(parameters);
        }
Example #23
 /// <summary>Convert an Object[] to a DatanodeInfo[].</summary>
 /// <exception cref="System.IO.IOException"/>
 private static DatanodeInfo[] ToDatanodeInfoArray <_T0>(IList <_T0> objects)
 {
     if (objects == null)
     {
         return(null);
     }
     else
     {
         if (objects.IsEmpty())
         {
             return(EmptyDatanodeInfoArray);
         }
         else
         {
             DatanodeInfo[] array = new DatanodeInfo[objects.Count];
             int            i     = 0;
             foreach (object @object in objects)
             {
                 array[i++] = ToDatanodeInfo((IDictionary <object, object>)@object);
             }
             return(array);
         }
     }
 }
Example #24
        /// <summary>Removes one qualifier node and fixes the options.</summary>
        /// <param name="qualNode">qualifier to remove</param>
        public virtual void RemoveQualifier(Com.Adobe.Xmp.Impl.XMPNode qualNode)
        {
            PropertyOptions opts = GetOptions();

            if (qualNode.IsLanguageNode())
            {
                // if "xml:lang" is removed, remove hasLanguage-flag too
                opts.SetHasLanguage(false);
            }
            else
            {
                if (qualNode.IsTypeNode())
                {
                    // if "rdf:type" is removed, remove hasType-flag too
                    opts.SetHasType(false);
                }
            }
            GetQualifier().Remove(qualNode);
            if (qualifier.IsEmpty())
            {
                opts.SetHasQualifiers(false);
                qualifier = null;
            }
        }
 internal virtual PdfCleanUpFilter.FilterResult <ImageData> FilterImage(ImageRenderInfo image, IList <Rectangle
                                                                                                      > imageAreasToBeCleaned)
 {
     if (imageAreasToBeCleaned == null)
     {
         return(new PdfCleanUpFilter.FilterResult <ImageData>(true, null));
     }
     else
     {
         if (imageAreasToBeCleaned.IsEmpty())
         {
             return(new PdfCleanUpFilter.FilterResult <ImageData>(false, null));
         }
     }
     byte[] filteredImageBytes;
     try {
         byte[] originalImageBytes = image.GetImage().GetImageBytes();
         filteredImageBytes = ProcessImage(originalImageBytes, imageAreasToBeCleaned);
     }
     catch (Exception e) {
         throw new Exception(e.Message);
     }
     return(new PdfCleanUpFilter.FilterResult <ImageData>(true, ImageDataFactory.Create(filteredImageBytes)));
 }
Example #26
        /// <summary>
        /// Match the views under the stream to the list of view specifications passed in. The method changes the view
        /// specifications list passed in and removes those specifications for which matching views have been found.
        /// If none of the views under the stream matches the first view specification passed in, the method returns
        /// the stream itself and leaves the view specification list unchanged. If one view under the stream matches,
        /// the view's specification is removed from the list. The method will then attempt to determine if any child
        /// views of that view also match specifications.
        /// </summary>
        /// <param name="rootViewable">is the top rootViewable event stream to which all views are attached as child views. This parameter is changed by this method, ie. specifications are removed if they match existing views.</param>
        /// <param name="viewFactories">is the view specifications for making views</param>
        /// <returns>
        /// a pair of (A) the stream if no views matched, or the last child view that matched (B) the full list of parent views
        /// </returns>
        protected internal static Pair <Viewable, IList <View> > MatchExistingViews(Viewable rootViewable, IList <ViewFactory> viewFactories)
        {
            Viewable     currentParent   = rootViewable;
            IList <View> matchedViewList = new List <View>();

            bool foundMatch;

            if (viewFactories.IsEmpty())
            {
                return(new Pair <Viewable, IList <View> >(rootViewable, Collections.GetEmptyList <View>()));
            }

            do      // while ((foundMatch) && (specifications.Count > 0));
            {
                foundMatch = false;

                foreach (View childView in currentParent.Views)
                {
                    ViewFactory currentFactory = viewFactories[0];

                    if (!(currentFactory.CanReuse(childView)))
                    {
                        continue;
                    }

                    // The specifications match, check current data window size
                    viewFactories.RemoveAt(0);
                    currentParent = childView;
                    foundMatch    = true;
                    matchedViewList.Add(childView);
                    break;
                }
            }while ((foundMatch) && (viewFactories.IsNotEmpty()));

            return(new Pair <Viewable, IList <View> >(currentParent, matchedViewList));
        }
Example #27
        /// <summary>Remove an xattr for a file or directory.</summary>
        /// <param name="src">- path to remove the xattr from</param>
        /// <param name="xAttr">- xAttr to remove</param>
        /// <exception cref="System.IO.IOException"/>
        internal static HdfsFileStatus RemoveXAttr(FSDirectory fsd, string src, XAttr xAttr
                                                   , bool logRetryCache)
        {
            FSDirXAttrOp.CheckXAttrsConfigFlag(fsd);
            FSPermissionChecker pc = fsd.GetPermissionChecker();

            XAttrPermissionFilter.CheckPermissionForApi(pc, xAttr, FSDirectory.IsReservedRawName
                                                            (src));
            byte[][]      pathComponents = FSDirectory.GetPathComponentsForReservedPath(src);
            IList <XAttr> xAttrs         = Lists.NewArrayListWithCapacity(1);

            xAttrs.AddItem(xAttr);
            INodesInPath iip;

            fsd.WriteLock();
            try
            {
                src = fsd.ResolvePath(pc, src, pathComponents);
                iip = fsd.GetINodesInPath4Write(src);
                CheckXAttrChangeAccess(fsd, iip, xAttr, pc);
                IList <XAttr> removedXAttrs = UnprotectedRemoveXAttrs(fsd, src, xAttrs);
                if (removedXAttrs != null && !removedXAttrs.IsEmpty())
                {
                    fsd.GetEditLog().LogRemoveXAttrs(src, removedXAttrs, logRetryCache);
                }
                else
                {
                    throw new IOException("No matching attributes found for remove operation");
                }
            }
            finally
            {
                fsd.WriteUnlock();
            }
            return(fsd.GetAuditFileInfo(iip));
        }
Example #28
        static double GetSlope <T>(IList <T> yArray, Func <T, double> map, out double averageY)
        {
            if (yArray == null)
            {
                throw new ArgumentNullException("yArray", "Is null");
            }
            if (yArray.IsEmpty())
            {
                return(averageY = double.NaN);
            }
            double n = yArray.Count;
            double sumxy = 0, sumx = 0, sumy = 0;
            double sumx2 = 0;

            for (int i = 0; i < n; i++)
            {
                var d = map(yArray[i]);
                sumxy += i * d;
                sumx  += i;
                sumy  += d;
                sumx2 += (long)i * i;
            }
            return((sumxy - sumx * (averageY = sumy / n)) / (sumx2 - sumx * sumx / n));
        }
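GetSlope above fits an ordinary least-squares line through the points (0, map(yArray[0])), (1, map(yArray[1])), ..., (n-1, map(yArray[n-1])): it returns slope = (sum(x*y) - sum(x)*averageY) / (sum(x^2) - sum(x)^2/n) and sets averageY to the mean of the mapped values. An illustrative call (hypothetical, since the method's visibility in its source may not allow calling it directly):

// Illustrative only:
//   IList<double> samples = new List<double> { 2.0, 4.1, 5.9, 8.2 };
//   double mean;
//   double slope = GetSlope(samples, v => v, out mean);
//   // slope == 2.04 (values rise by roughly 2 per step), mean == 5.05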
Example #29
        /// <summary>
        /// Move all apps in the set of queues to the parent plan queue's default
        /// reservation queue in a synchronous fashion
        /// </summary>
        private void MoveAppsInQueueSync(string expiredReservation, string defReservationQueue
                                         )
        {
            IList <ApplicationAttemptId> activeApps = scheduler.GetAppsInQueue(expiredReservation
                                                                               );

            if (activeApps.IsEmpty())
            {
                return;
            }
            foreach (ApplicationAttemptId app in activeApps)
            {
                // fallback to parent's default queue
                try
                {
                    scheduler.MoveApplication(app.GetApplicationId(), defReservationQueue);
                }
                catch (YarnException e)
                {
                    Log.Warn("Encountered unexpected error during migration of application: {}" + " from reservation: {}"
                             , app, expiredReservation, e);
                }
            }
        }
Example #30
        /// <summary>Update xattrs of inode.</summary>
        /// <remarks>
        /// Update xattrs of inode.
        /// <p/>
        /// Must be called while holding the FSDirectory write lock.
        /// </remarks>
        /// <param name="inode">INode to update</param>
        /// <param name="xAttrs">to update xAttrs.</param>
        /// <param name="snapshotId">id of the latest snapshot of the inode</param>
        /// <exception cref="Org.Apache.Hadoop.Hdfs.Protocol.QuotaExceededException"/>
        public static void UpdateINodeXAttrs(INode inode, IList <XAttr> xAttrs, int snapshotId
                                             )
        {
            if (xAttrs == null || xAttrs.IsEmpty())
            {
                if (inode.GetXAttrFeature() != null)
                {
                    inode.RemoveXAttrFeature(snapshotId);
                }
                return;
            }
            // Dedupe the xAttr name and save them into a new interned list
            IList <XAttr> internedXAttrs = Lists.NewArrayListWithCapacity(xAttrs.Count);

            foreach (XAttr xAttr in xAttrs)
            {
                string name         = xAttr.GetName();
                string internedName = internedNames[name];
                if (internedName == null)
                {
                    internedName = name;
                    internedNames[internedName] = internedName;
                }
                XAttr internedXAttr = new XAttr.Builder().SetName(internedName).SetNameSpace(xAttr
                                                                                             .GetNameSpace()).SetValue(xAttr.GetValue()).Build();
                internedXAttrs.AddItem(internedXAttr);
            }
            // Save the list of interned xattrs
            ImmutableList <XAttr> newXAttrs = ImmutableList.CopyOf(internedXAttrs);

            if (inode.GetXAttrFeature() != null)
            {
                inode.RemoveXAttrFeature(snapshotId);
            }
            inode.AddXAttrFeature(new XAttrFeature(newXAttrs), snapshotId);
        }
Example #31
        public static ExprNode ConnectExpressionsByLogicalAnd(
            IList<ExprNode> nodes,
            ExprNode optionalAdditionalFilter)
        {
            if (nodes.IsEmpty()) {
                return optionalAdditionalFilter;
            }

            if (optionalAdditionalFilter == null) {
                if (nodes.Count == 1) {
                    return nodes[0];
                }

                return ConnectExpressionsByLogicalAnd(nodes);
            }

            if (nodes.Count == 1) {
                return ConnectExpressionsByLogicalAnd(Collections.List(nodes[0], optionalAdditionalFilter));
            }

            ExprAndNode andNode = ConnectExpressionsByLogicalAnd(nodes);
            andNode.AddChildNode(optionalAdditionalFilter);
            return andNode;
        }
Example #32
 /// <summary>
 /// Print part.
 /// </summary>
 /// <param name="writer">to write to</param>
 public void ToEPL(TextWriter writer)
 {
     writer.Write("expression ");
     if (_optionalReturnType != null)
     {
         writer.Write(_optionalReturnType);
         writer.Write(" ");
     }
     if (_optionalEventTypeName != null)
     {
         writer.Write("@type(");
         writer.Write(_optionalEventTypeName);
         writer.Write(") ");
     }
     if (!string.IsNullOrWhiteSpace(_optionalDialect))
     {
         writer.Write(_optionalDialect);
         writer.Write(":");
     }
     writer.Write(_name);
     writer.Write("(");
     if (_parameterNames != null && !_parameterNames.IsEmpty())
     {
         string delimiter = "";
         foreach (string name in _parameterNames)
         {
             writer.Write(delimiter);
             writer.Write(name);
             delimiter = ",";
         }
     }
     writer.Write(")");
     writer.Write(" [");
     writer.Write(_expressionText);
     writer.Write("]");
 }
Example #33
        public override EnumEval GetEnumEval(
            MethodResolutionService methodResolutionService,
            EventAdapterService eventAdapterService,
            StreamTypeService streamTypeService,
            String statementId,
            String enumMethodUsedName,
            IList <ExprDotEvalParam> bodiesAndParameters,
            EventType inputEventType,
            Type collectionComponentType,
            int numStreamsIncoming,
            bool disablePropertyExpressionEventCollCache)
        {
            bool max = this.EnumMethodEnum == EnumMethodEnum.MAX;

            Type returnType;

            if (bodiesAndParameters.IsEmpty())
            {
                returnType    = collectionComponentType.GetBoxedType();
                base.TypeInfo = EPTypeHelper.SingleValue(returnType);
                return(new EnumEvalMinMaxScalar(numStreamsIncoming, max));
            }

            var first = (ExprDotEvalParamLambda)bodiesAndParameters[0];

            returnType    = first.BodyEvaluator.ReturnType.GetBoxedType();
            base.TypeInfo = EPTypeHelper.SingleValue(returnType);

            if (inputEventType == null)
            {
                return(new EnumEvalMinMaxScalarLambda(
                           first.BodyEvaluator, first.StreamCountIncoming, max,
                           (ObjectArrayEventType)first.GoesToTypes[0]));
            }
            return(new EnumEvalMinMaxEvents(first.BodyEvaluator, first.StreamCountIncoming, max));
        }
        /// <summary>Parse a Sentence.</summary>
        /// <remarks>
        /// Parse a Sentence.  It is assumed that when this is called, the pparser
        /// has already been called to parse the sentence.
        /// </remarks>
        /// <param name="words">The list of words to parse.</param>
        /// <returns>true iff it could be parsed</returns>
        public virtual bool Parse <_T0>(IList <_T0> words)
            where _T0 : IHasWord
        {
            nGoodTrees.Clear();
            int numParsesToConsider = numToFind * op.testOptions.fastFactoredCandidateMultiplier + op.testOptions.fastFactoredCandidateAddend;

            if (pparser.HasParse())
            {
                IList <ScoredObject <Tree> > pcfgBest   = pparser.GetKBestParses(numParsesToConsider);
                Beam <ScoredObject <Tree> >  goodParses = new Beam <ScoredObject <Tree> >(numToFind);
                foreach (ScoredObject <Tree> candidate in pcfgBest)
                {
                    if (Thread.Interrupted())
                    {
                        throw new RuntimeInterruptedException();
                    }
                    double depScore       = DepScoreTree(candidate.Object());
                    ScoredObject <Tree> x = new ScoredObject <Tree>(candidate.Object(), candidate.Score() + depScore);
                    goodParses.Add(x);
                }
                nGoodTrees = goodParses.AsSortedList();
            }
            return(!nGoodTrees.IsEmpty());
        }
Example #35
        public static string ToTable(IList <Cabin> cabins)
        {
            const string header = "<table style=\"width:100%\" border=1 bordercolor=black>" +
                                  "<caption>Cabins</caption>" +
                                  "<thead>" +
                                  "<tr>" +
                                  "<th>Name</th>" +
                                  "<th>Square</th>" +
                                  "<th>Class</th>" +
                                  "<th>Passengers</th>" +
                                  "</tr>" +
                                  "</thead>" +
                                  "<tbody>";
            const string footer = "</tbody></table>";

            var builder = new StringBuilder();

            builder.Append(header);

            if (cabins.IsEmpty())
            {
                builder.Append(ToRow(_EmptyCabin));
            }
            else
            {
                foreach (var cabin in cabins)
                {
                    builder.Append(ToRow(cabin));
                }
            }


            builder.Append(footer);

            return(builder.ToString());
        }
Example #36
        /// <summary>
        /// Removes a number of items from a list based on a predicate selector
        /// </summary>
        /// <typeparam name="T">The underlying type of each element in the list.</typeparam>
        /// <param name="list">The list of items.</param>
        /// <param name="predicate">A predicate that determines if a given element within the list should be removed.</param>
        /// <exception cref="NullReferenceException">If the list is null.</exception>
        /// <exception cref="NullReferenceException">If the predicate discriminator is null.</exception>
        public static void Remove <T>(this IList <T> list, Func <T, bool> predicate)
        {
            if (list == null)
            {
                throw new NullReferenceException("List is null.");
            }

            if (predicate == null)
            {
                throw new ArgumentNullException(nameof(predicate));
            }

            if (list.IsEmpty())
            {
                return;
            }

            var items = list.Where(predicate).ToList();

            foreach (var item in items)
            {
                list.Remove(item);
            }
        }
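A short usage sketch for the Remove extension above (illustrative; it assumes the extension method is in scope):

using System.Collections.Generic;

var values = new List<int> { 1, 2, 3, 4, 5, 6 };
values.Remove(v => v % 2 == 0);   // the lambda cannot convert to int, so the predicate extension above is chosen
// values now contains 1, 3, 5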
Example #37
        public override void AddChild(IRenderer renderer)
        {
            LayoutTaggingHelper taggingHelper = this.GetProperty <LayoutTaggingHelper>(Property.TAGGING_HELPER);

            if (taggingHelper != null)
            {
                LayoutTaggingHelper.AddTreeHints(taggingHelper, renderer);
            }
            // Some positioned renderers might have been fetched from non-positioned child and added to this renderer,
            // so we use this generic mechanism of determining which renderers have been just added.
            int numberOfChildRenderers           = childRenderers.Count;
            int numberOfPositionedChildRenderers = positionedRenderers.Count;

            base.AddChild(renderer);
            IList <IRenderer> addedRenderers           = new List <IRenderer>(1);
            IList <IRenderer> addedPositionedRenderers = new List <IRenderer>(1);

            while (childRenderers.Count > numberOfChildRenderers)
            {
                addedRenderers.Add(childRenderers[numberOfChildRenderers]);
                childRenderers.JRemoveAt(numberOfChildRenderers);
            }
            while (positionedRenderers.Count > numberOfPositionedChildRenderers)
            {
                addedPositionedRenderers.Add(positionedRenderers[numberOfPositionedChildRenderers]);
                positionedRenderers.JRemoveAt(numberOfPositionedChildRenderers);
            }
            bool marginsCollapsingEnabled = true.Equals(GetPropertyAsBoolean(Property.COLLAPSING_MARGINS));

            if (currentArea == null)
            {
                UpdateCurrentAndInitialArea(null);
                if (marginsCollapsingEnabled)
                {
                    marginsCollapseHandler = new MarginsCollapseHandler(this, null);
                }
            }
            // Static layout
            for (int i = 0; currentArea != null && i < addedRenderers.Count; i++)
            {
                renderer = addedRenderers[i];
                bool rendererIsFloat = FloatingHelper.IsRendererFloating(renderer);
                bool clearanceOverflowsToNextPage = FloatingHelper.IsClearanceApplied(waitingNextPageRenderers, renderer.GetProperty
                                                                                      <ClearPropertyValue?>(Property.CLEAR));
                if (rendererIsFloat && (floatOverflowedCompletely || clearanceOverflowsToNextPage))
                {
                    waitingNextPageRenderers.Add(renderer);
                    floatOverflowedCompletely = true;
                    continue;
                }
                ProcessWaitingKeepWithNextElement(renderer);
                IList <IRenderer>   resultRenderers  = new List <IRenderer>();
                LayoutResult        result           = null;
                RootLayoutArea      storedArea       = null;
                RootLayoutArea      nextStoredArea   = null;
                MarginsCollapseInfo childMarginsInfo = null;
                if (marginsCollapsingEnabled && currentArea != null && renderer != null)
                {
                    childMarginsInfo = marginsCollapseHandler.StartChildMarginsHandling(renderer, currentArea.GetBBox());
                }
                while (clearanceOverflowsToNextPage || currentArea != null && renderer != null && (result = renderer.SetParent
                                                                                                                (this).Layout(new LayoutContext(currentArea.Clone(), childMarginsInfo, floatRendererAreas))).GetStatus
                           () != LayoutResult.FULL)
                {
                    bool currentAreaNeedsToBeUpdated = false;
                    if (clearanceOverflowsToNextPage)
                    {
                        result = new LayoutResult(LayoutResult.NOTHING, null, null, renderer);
                        currentAreaNeedsToBeUpdated = true;
                    }
                    if (result.GetStatus() == LayoutResult.PARTIAL)
                    {
                        if (rendererIsFloat)
                        {
                            waitingNextPageRenderers.Add(result.GetOverflowRenderer());
                            break;
                        }
                        else
                        {
                            ProcessRenderer(result.GetSplitRenderer(), resultRenderers);
                            if (nextStoredArea != null)
                            {
                                currentArea       = nextStoredArea;
                                currentPageNumber = nextStoredArea.GetPageNumber();
                                nextStoredArea    = null;
                            }
                            else
                            {
                                currentAreaNeedsToBeUpdated = true;
                            }
                        }
                    }
                    else
                    {
                        if (result.GetStatus() == LayoutResult.NOTHING && !clearanceOverflowsToNextPage)
                        {
                            if (result.GetOverflowRenderer() is ImageRenderer)
                            {
                                float imgHeight = ((ImageRenderer)result.GetOverflowRenderer()).GetOccupiedArea().GetBBox().GetHeight();
                                if (!floatRendererAreas.IsEmpty() || currentArea.GetBBox().GetHeight() < imgHeight && !currentArea.IsEmptyArea
                                        ())
                                {
                                    if (rendererIsFloat)
                                    {
                                        waitingNextPageRenderers.Add(result.GetOverflowRenderer());
                                        floatOverflowedCompletely = true;
                                        break;
                                    }
                                    currentAreaNeedsToBeUpdated = true;
                                }
                                else
                                {
                                    ((ImageRenderer)result.GetOverflowRenderer()).AutoScale(currentArea);
                                    result.GetOverflowRenderer().SetProperty(Property.FORCED_PLACEMENT, true);
                                    ILog logger = LogManager.GetLogger(typeof(RootRenderer));
                                    logger.Warn(MessageFormatUtil.Format(iText.IO.LogMessageConstant.ELEMENT_DOES_NOT_FIT_AREA, ""));
                                }
                            }
                            else
                            {
                                if (currentArea.IsEmptyArea() && result.GetAreaBreak() == null)
                                {
                                    if (true.Equals(result.GetOverflowRenderer().GetModelElement().GetProperty <bool?>(Property.KEEP_TOGETHER))
                                        )
                                    {
                                        result.GetOverflowRenderer().GetModelElement().SetProperty(Property.KEEP_TOGETHER, false);
                                        ILog logger = LogManager.GetLogger(typeof(RootRenderer));
                                        logger.Warn(MessageFormatUtil.Format(iText.IO.LogMessageConstant.ELEMENT_DOES_NOT_FIT_AREA, "KeepTogether property will be ignored."
                                                                             ));
                                        if (storedArea != null)
                                        {
                                            nextStoredArea    = currentArea;
                                            currentArea       = storedArea;
                                            currentPageNumber = storedArea.GetPageNumber();
                                        }
                                        storedArea = currentArea;
                                    }
                                    else
                                    {
                                        if (null != result.GetCauseOfNothing() && true.Equals(result.GetCauseOfNothing().GetProperty <bool?>(Property
                                                                                                                                             .KEEP_TOGETHER)))
                                        {
                                            // set KEEP_TOGETHER false on the deepest parent (maybe the element itself) to have KEEP_TOGETHER == true
                                            IRenderer theDeepestKeptTogether = result.GetCauseOfNothing();
                                            IRenderer parent;
                                            while (null == theDeepestKeptTogether.GetModelElement() || null == theDeepestKeptTogether.GetModelElement(
                                                       ).GetOwnProperty <bool?>(Property.KEEP_TOGETHER))
                                            {
                                                parent = ((AbstractRenderer)theDeepestKeptTogether).parent;
                                                if (parent == null)
                                                {
                                                    break;
                                                }
                                                theDeepestKeptTogether = parent;
                                            }
                                            theDeepestKeptTogether.GetModelElement().SetProperty(Property.KEEP_TOGETHER, false);
                                            ILog logger = LogManager.GetLogger(typeof(RootRenderer));
                                            logger.Warn(MessageFormatUtil.Format(iText.IO.LogMessageConstant.ELEMENT_DOES_NOT_FIT_AREA, "KeepTogether property of inner element will be ignored."
                                                                                 ));
                                        }
                                        else
                                        {
                                            if (!true.Equals(renderer.GetProperty <bool?>(Property.FORCED_PLACEMENT)))
                                            {
                                                result.GetOverflowRenderer().SetProperty(Property.FORCED_PLACEMENT, true);
                                                ILog logger = LogManager.GetLogger(typeof(RootRenderer));
                                                logger.Warn(MessageFormatUtil.Format(iText.IO.LogMessageConstant.ELEMENT_DOES_NOT_FIT_AREA, ""));
                                            }
                                            else
                                            {
                                                // FORCED_PLACEMENT was already set to the renderer and
                                                // LogMessageConstant.ELEMENT_DOES_NOT_FIT_AREA message was logged.
                                                // This else-clause should never be hit, otherwise there is a bug in FORCED_PLACEMENT implementation.
                                                System.Diagnostics.Debug.Assert(false);
                                                // Still handling this case in order to avoid nasty infinite loops.
                                                break;
                                            }
                                        }
                                    }
                                }
                                else
                                {
                                    storedArea = currentArea;
                                    if (nextStoredArea != null)
                                    {
                                        currentArea       = nextStoredArea;
                                        currentPageNumber = nextStoredArea.GetPageNumber();
                                        nextStoredArea    = null;
                                    }
                                    else
                                    {
                                        if (rendererIsFloat)
                                        {
                                            waitingNextPageRenderers.Add(result.GetOverflowRenderer());
                                            floatOverflowedCompletely = true;
                                            break;
                                        }
                                        currentAreaNeedsToBeUpdated = true;
                                    }
                                }
                            }
                        }
                    }
                    renderer = result.GetOverflowRenderer();
                    if (marginsCollapsingEnabled)
                    {
                        marginsCollapseHandler.EndChildMarginsHandling(currentArea.GetBBox());
                    }
                    if (currentAreaNeedsToBeUpdated)
                    {
                        UpdateCurrentAndInitialArea(result);
                    }
                    if (marginsCollapsingEnabled)
                    {
                        marginsCollapseHandler = new MarginsCollapseHandler(this, null);
                        childMarginsInfo       = marginsCollapseHandler.StartChildMarginsHandling(renderer, currentArea.GetBBox());
                    }
                    clearanceOverflowsToNextPage = clearanceOverflowsToNextPage && FloatingHelper.IsClearanceApplied(waitingNextPageRenderers
                                                                                                                     , renderer.GetProperty <ClearPropertyValue?>(Property.CLEAR));
                }
                if (marginsCollapsingEnabled)
                {
                    marginsCollapseHandler.EndChildMarginsHandling(currentArea.GetBBox());
                }
                if (null != result && null != result.GetSplitRenderer())
                {
                    renderer = result.GetSplitRenderer();
                }
                // Keep renderer until next element is added for future keep with next adjustments
                if (renderer != null && result != null)
                {
                    if (true.Equals(renderer.GetProperty <bool?>(Property.KEEP_WITH_NEXT)))
                    {
                        if (true.Equals(renderer.GetProperty <bool?>(Property.FORCED_PLACEMENT)))
                        {
                            ILog logger = LogManager.GetLogger(typeof(RootRenderer));
                            logger.Warn(iText.IO.LogMessageConstant.ELEMENT_WAS_FORCE_PLACED_KEEP_WITH_NEXT_WILL_BE_IGNORED);
                            ShrinkCurrentAreaAndProcessRenderer(renderer, resultRenderers, result);
                        }
                        else
                        {
                            keepWithNextHangingRenderer             = renderer;
                            keepWithNextHangingRendererLayoutResult = result;
                        }
                    }
                    else
                    {
                        if (result.GetStatus() != LayoutResult.NOTHING)
                        {
                            ShrinkCurrentAreaAndProcessRenderer(renderer, resultRenderers, result);
                        }
                    }
                }
            }
            for (int i = 0; i < addedPositionedRenderers.Count; i++)
            {
                positionedRenderers.Add(addedPositionedRenderers[i]);
                renderer = positionedRenderers[positionedRenderers.Count - 1];
                int?positionedPageNumber = renderer.GetProperty <int?>(Property.PAGE_NUMBER);
                if (positionedPageNumber == null)
                {
                    positionedPageNumber = currentPageNumber;
                }
                LayoutArea layoutArea;
                // For position=absolute, if none of the top, bottom, left, right properties are provided,
                // the content should be displayed in the flow of the current content, not overlapping it.
                // The behavior is just if it would be statically positioned except it does not affect other elements
                if (Convert.ToInt32(LayoutPosition.ABSOLUTE).Equals(renderer.GetProperty <int?>(Property.POSITION)) && AbstractRenderer
                    .NoAbsolutePositionInfo(renderer))
                {
                    layoutArea = new LayoutArea((int)positionedPageNumber, currentArea.GetBBox().Clone());
                }
                else
                {
                    layoutArea = new LayoutArea((int)positionedPageNumber, initialCurrentArea.GetBBox().Clone());
                }
                Rectangle fullBbox = layoutArea.GetBBox().Clone();
                PreparePositionedRendererAndAreaForLayout(renderer, fullBbox, layoutArea.GetBBox());
                renderer.Layout(new PositionedLayoutContext(new LayoutArea(layoutArea.GetPageNumber(), fullBbox), layoutArea
                                                            ));
                if (immediateFlush)
                {
                    FlushSingleRenderer(renderer);
                    positionedRenderers.JRemoveAt(positionedRenderers.Count - 1);
                }
            }
        }
Example #38
        public void TestCompute()
        {
            // test null
            IList <EventBean[]> rowsA = null;
            IList <EventBean[]> rowsB = null;

            TryCompute(rowsA, rowsB);
            Assert.IsTrue(_results.IsEmpty());

            // test no rows A
            rowsA = new List <EventBean[]>();
            TryCompute(rowsA, rowsB);
            Assert.IsTrue(_results.IsEmpty());

            // test no rows B
            rowsA = null;
            rowsB = new List <EventBean[]>();
            TryCompute(rowsA, rowsB);
            Assert.IsTrue(_results.IsEmpty());

            // test side A one row, B empty
            rowsA = MakeRowsA(1);
            rowsB = null;
            TryCompute(rowsA, rowsB);
            Assert.AreEqual(1, _results.Count);
            EPAssertionUtil.AssertEqualsExactOrder(rowsA[0], _results[0]);

            // test side B one row, A empty
            rowsA = null;
            rowsB = MakeRowsB(1);
            TryCompute(rowsA, rowsB);
            Assert.AreEqual(1, _results.Count);
            EPAssertionUtil.AssertEqualsExactOrder(rowsB[0], _results[0]);

            // test A and B one row
            rowsA = MakeRowsA(1);
            rowsB = MakeRowsB(1);
            TryCompute(rowsA, rowsB);
            Assert.AreEqual(1, _results.Count);
            EPAssertionUtil.AssertEqualsExactOrder(
                new[] { rowsA[0][0], rowsB[0][1], null, rowsA[0][3] }, _results[0]);

            // test A=2 rows and B=1 row
            rowsA = MakeRowsA(2);
            rowsB = MakeRowsB(1);
            TryCompute(rowsA, rowsB);
            Assert.AreEqual(2, _results.Count);
            EPAssertionUtil.AssertEqualsAnyOrder(
                new[]
            {
                new[] { rowsA[0][0], rowsB[0][1], null, rowsA[0][3] },
                new[] { rowsA[1][0], rowsB[0][1], null, rowsA[1][3] }
            }, SupportJoinResultNodeFactory.ConvertTo2DimArr(_results));

            // test A=1 row and B=2 rows
            rowsA = MakeRowsA(1);
            rowsB = MakeRowsB(2);
            TryCompute(rowsA, rowsB);
            Assert.AreEqual(2, _results.Count);
            EPAssertionUtil.AssertEqualsAnyOrder(
                new[]
            {
                new[] { rowsA[0][0], rowsB[0][1], null, rowsA[0][3] },
                new[] { rowsA[0][0], rowsB[1][1], null, rowsA[0][3] }
            }, SupportJoinResultNodeFactory.ConvertTo2DimArr(_results));

            // test A=2 rows and B=2 rows
            rowsA = MakeRowsA(2);
            rowsB = MakeRowsB(2);
            TryCompute(rowsA, rowsB);
            Assert.AreEqual(4, _results.Count);
            EPAssertionUtil.AssertEqualsAnyOrder(
                new[]
            {
                new[] { rowsA[0][0], rowsB[0][1], null, rowsA[0][3] },
                new[] { rowsA[0][0], rowsB[1][1], null, rowsA[0][3] },
                new[] { rowsA[1][0], rowsB[0][1], null, rowsA[1][3] },
                new[] { rowsA[1][0], rowsB[1][1], null, rowsA[1][3] }
            }, SupportJoinResultNodeFactory.ConvertTo2DimArr(_results));

            // test A=2 rows and B=3 rows
            rowsA = MakeRowsA(2);
            rowsB = MakeRowsB(3);
            TryCompute(rowsA, rowsB);
            Assert.AreEqual(6, _results.Count);
            EPAssertionUtil.AssertEqualsAnyOrder(
                new[]
            {
                new[] { rowsA[0][0], rowsB[0][1], null, rowsA[0][3] },
                new[] { rowsA[0][0], rowsB[1][1], null, rowsA[0][3] },
                new[] { rowsA[0][0], rowsB[2][1], null, rowsA[0][3] },
                new[] { rowsA[1][0], rowsB[0][1], null, rowsA[1][3] },
                new[] { rowsA[1][0], rowsB[1][1], null, rowsA[1][3] },
                new[] { rowsA[1][0], rowsB[2][1], null, rowsA[1][3] }
            },
                SupportJoinResultNodeFactory.ConvertTo2DimArr(_results));
        }
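The expected rows above follow a simple composition: if one side has no rows, the other side's rows pass through unchanged; otherwise every A row is paired with every B row, taking streams 0 and 3 from A and stream 1 from B. A minimal stand-alone sketch of that composition (a hypothetical helper, not the production TryCompute under test):

using System.Collections.Generic;

static class JoinComposeSketch
{
    public static IList<object[]> Compose(IList<object[]> rowsA, IList<object[]> rowsB)
    {
        var results = new List<object[]>();
        bool aEmpty = rowsA == null || rowsA.Count == 0;
        bool bEmpty = rowsB == null || rowsB.Count == 0;

        if (aEmpty && bEmpty) return results;            // no input rows at all
        if (bEmpty) return new List<object[]>(rowsA);    // side A passes through
        if (aEmpty) return new List<object[]>(rowsB);    // side B passes through

        foreach (var a in rowsA)
            foreach (var b in rowsB)
                results.Add(new object[] { a[0], b[1], null, a[3] });   // stream 2 stays unfilled
        return results;
    }
}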
Beispiel #39
0
        public static ExprDotNodeRealizedChain GetChainEvaluators(
            int?streamOfProviderIfApplicable,
            EPType inputType,
            IList <ExprChainedSpec> chainSpec,
            ExprValidationContext validationContext,
            bool isDuckTyping,
            ExprDotNodeFilterAnalyzerInput inputDesc)
        {
            var            methodEvals      = new List <ExprDotEval>();
            var            currentInputType = inputType;
            EnumMethodEnum?lastLambdaFunc   = null;
            var            lastElement      = chainSpec.IsEmpty() ? null : chainSpec[chainSpec.Count - 1];
            ExprDotNodeFilterAnalyzerDesc filterAnalyzerDesc = null;

            var chainSpecStack = new ArrayDeque <ExprChainedSpec>(chainSpec);

            while (!chainSpecStack.IsEmpty())
            {
                var chainElement = chainSpecStack.RemoveFirst();
                lastLambdaFunc = null;  // reset

                // compile parameters for chain element
                var paramEvals = new ExprEvaluator[chainElement.Parameters.Count];
                var paramTypes = new Type[chainElement.Parameters.Count];
                for (var i = 0; i < chainElement.Parameters.Count; i++)
                {
                    paramEvals[i] = chainElement.Parameters[i].ExprEvaluator;
                    paramTypes[i] = paramEvals[i].ReturnType;
                }

                // check if special 'size' method
                if (currentInputType is ClassMultiValuedEPType)
                {
                    var type = (ClassMultiValuedEPType)currentInputType;
                    if ((chainElement.Name.ToLower() == "size") && paramTypes.Length == 0 && Equals(lastElement, chainElement))
                    {
                        var sizeExpr = new ExprDotEvalArraySize();
                        methodEvals.Add(sizeExpr);
                        currentInputType = sizeExpr.TypeInfo;
                        continue;
                    }
                    if ((chainElement.Name.ToLower() == "get") && paramTypes.Length == 1 && paramTypes[0].GetBoxedType() == typeof(int?))
                    {
                        var componentType = type.Component;
                        var get           = new ExprDotEvalArrayGet(paramEvals[0], componentType);
                        methodEvals.Add(get);
                        currentInputType = get.TypeInfo;
                        continue;
                    }
                }

                // determine if there is a matching method
                var matchingMethod = false;
                var methodTarget   = GetMethodTarget(currentInputType);
                if (methodTarget != null)
                {
                    try
                    {
                        GetValidateMethodDescriptor(methodTarget, chainElement.Name, chainElement.Parameters, validationContext);
                        matchingMethod = true;
                    }
                    catch (ExprValidationException)
                    {
                        // expected
                    }
                }

                // resolve lambda
                if (chainElement.Name.IsEnumerationMethod() && (!matchingMethod || methodTarget.IsArray || methodTarget.IsImplementsInterface(typeof(ICollection <object>))))
                {
                    var enumerationMethod = EnumMethodEnumExtensions.FromName(chainElement.Name);
                    var eval = TypeHelper.Instantiate <ExprDotEvalEnumMethod>(enumerationMethod.GetImplementation());
                    eval.Init(streamOfProviderIfApplicable, enumerationMethod, chainElement.Name, currentInputType, chainElement.Parameters, validationContext);
                    currentInputType = eval.TypeInfo;
                    if (currentInputType == null)
                    {
                        throw new IllegalStateException("Enumeration method '" + chainElement.Name + "' has not returned type information");
                    }
                    methodEvals.Add(eval);
                    lastLambdaFunc = enumerationMethod;
                    continue;
                }

                // resolve datetime
                if (chainElement.Name.IsDateTimeMethod() && (!matchingMethod || methodTarget == typeof(DateTimeOffset?)))
                {
                    var datetimeMethod = DatetimeMethodEnumExtensions.FromName(chainElement.Name);
                    var datetimeImpl   = ExprDotEvalDTFactory.ValidateMake(
                        validationContext.StreamTypeService, chainSpecStack, datetimeMethod, chainElement.Name,
                        currentInputType, chainElement.Parameters, inputDesc,
                        validationContext.EngineImportService.TimeZone,
                        validationContext.EngineImportService.TimeAbacus);
                    currentInputType = datetimeImpl.ReturnType;
                    if (currentInputType == null)
                    {
                        throw new IllegalStateException("Date-time method '" + chainElement.Name + "' has not returned type information");
                    }
                    methodEvals.Add(datetimeImpl.Eval);
                    filterAnalyzerDesc = datetimeImpl.IntervalFilterDesc;
                    continue;
                }

                // try to resolve as property if the last method returned a type
                if (currentInputType is EventEPType)
                {
                    var inputEventType = ((EventEPType)currentInputType).EventType;
                    var type           = inputEventType.GetPropertyType(chainElement.Name);
                    var getter         = inputEventType.GetGetter(chainElement.Name);
                    if (type != null && getter != null)
                    {
                        var noduck = new ExprDotEvalProperty(getter, EPTypeHelper.SingleValue(type.GetBoxedType()));
                        methodEvals.Add(noduck);
                        currentInputType = EPTypeHelper.SingleValue(EPTypeHelper.GetClassSingleValued(noduck.TypeInfo));
                        continue;
                    }
                }

                // Finally try to resolve the method
                if (methodTarget != null)
                {
                    try
                    {
                        // find descriptor again, allow for duck typing
                        var desc       = GetValidateMethodDescriptor(methodTarget, chainElement.Name, chainElement.Parameters, validationContext);
                        var fastMethod = desc.FastMethod;
                        paramEvals = desc.ChildEvals;

                        ExprDotEval eval;
                        if (currentInputType is ClassEPType)
                        {
                            // if followed by an enumeration method, convert array to collection
                            if (fastMethod.ReturnType.IsArray && !chainSpecStack.IsEmpty() && chainSpecStack.First.Name.IsEnumerationMethod())
                            {
                                eval = new ExprDotMethodEvalNoDuckWrapArray(validationContext.StatementName, fastMethod, paramEvals);
                            }
                            else
                            {
                                eval = new ExprDotMethodEvalNoDuck(validationContext.StatementName, fastMethod, paramEvals);
                            }
                        }
                        else
                        {
                            eval = new ExprDotMethodEvalNoDuckUnderlying(validationContext.StatementName, fastMethod, paramEvals);
                        }
                        methodEvals.Add(eval);
                        currentInputType = eval.TypeInfo;
                    }
                    catch (Exception e)
                    {
                        if (!isDuckTyping)
                        {
                            throw new ExprValidationException(e.Message, e);
                        }
                        else
                        {
                            var duck = new ExprDotMethodEvalDuck(validationContext.StatementName, validationContext.EngineImportService, chainElement.Name, paramTypes, paramEvals);
                            methodEvals.Add(duck);
                            currentInputType = duck.TypeInfo;
                        }
                    }
                    continue;
                }

                var message = "Could not find event property, enumeration method or instance method named '" +
                              chainElement.Name + "' in " + currentInputType.ToTypeDescriptive();
                throw new ExprValidationException(message);
            }

            var intermediateEvals = methodEvals.ToArray();

            if (lastLambdaFunc != null)
            {
                ExprDotEval finalEval = null;
                if (currentInputType is EventMultiValuedEPType)
                {
                    var mvType        = (EventMultiValuedEPType)currentInputType;
                    var tableMetadata = validationContext.TableService.GetTableMetadataFromEventType(mvType.Component);
                    if (tableMetadata != null)
                    {
                        finalEval = new ExprDotEvalUnpackCollEventBeanTable(mvType.Component, tableMetadata);
                    }
                    else
                    {
                        finalEval = new ExprDotEvalUnpackCollEventBean(mvType.Component);
                    }
                }
                else if (currentInputType is EventEPType)
                {
                    var epType        = (EventEPType)currentInputType;
                    var tableMetadata = validationContext.TableService.GetTableMetadataFromEventType(epType.EventType);
                    if (tableMetadata != null)
                    {
                        finalEval = new ExprDotEvalUnpackBeanTable(epType.EventType, tableMetadata);
                    }
                    else
                    {
                        finalEval = new ExprDotEvalUnpackBean(epType.EventType);
                    }
                }
                if (finalEval != null)
                {
                    methodEvals.Add(finalEval);
                }
            }

            var unpackingEvals = methodEvals.ToArray();

            return(new ExprDotNodeRealizedChain(intermediateEvals, unpackingEvals, filterAnalyzerDesc));
        }
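For orientation, the loop above tries several strategies, in order, for each element of a dot-expression chain (for example "myArrayProp.size()" or an enumeration method followed by a property access). A condensed summary of that precedence, derived from the branches above:

// Resolution order per chain element:
//   1. special array operations on multi-valued input: "size" with no arguments,
//      "get" with a single int argument
//   2. enumeration (lambda) methods, when the name matches and no matching instance
//      method exists (or the target is an array/collection)
//   3. date-time methods, when the name matches and no matching instance method exists
//   4. event properties, when the current input is an event type exposing the name
//   5. regular instance methods on the current input type
//   6. duck-typed calls, only when duck typing is enabled and step 5 failed validation
//   otherwise -> ExprValidationException ("Could not find event property, enumeration
//   method or instance method named ...")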
Beispiel #40
0
        /// <summary>
        /// Removes a view from a parent view returning the orphaned parent views in a list.
        /// </summary>
        /// <param name="parentViewable">parent to remove view from</param>
        /// <param name="viewToRemove">view to remove</param>
        /// <returns>
        /// chain of orphaned views
        /// </returns>
        /// <exception cref="System.ArgumentException"></exception>
        /// <exception cref="IllegalStateException">
        /// </exception>
        protected internal static IList <View> RemoveChainLeafView(Viewable parentViewable, Viewable viewToRemove)
        {
            IList <View> removedViews = new List <View>();

            // The view to remove must be a leaf node - non-leaf views are just not removed
            if (viewToRemove.HasViews)
            {
                return(removedViews);
            }

            // Find child viewToRemove among descendent views
            IList <View> viewPath = ViewSupport.FindDescendent(parentViewable, viewToRemove);

            if (viewPath == null)
            {
                String message = "Viewable not found when removing view " + viewToRemove;
                throw new ArgumentException(message);
            }

            // The viewToRemove is a direct child view of the stream
            if (viewPath.IsEmpty())
            {
                bool isViewRemoved = parentViewable.RemoveView((View)viewToRemove);

                if (!isViewRemoved)
                {
                    String message = "Failed to remove immediate child view " + viewToRemove;
                    Log.Fatal(".remove " + message);
                    throw new IllegalStateException(message);
                }

                removedViews.Add((View)viewToRemove);
                return(removedViews);
            }

            View[] viewPathArray = viewPath.ToArray();
            View   currentView   = (View)viewToRemove;

            // Remove child from parent views until a parent view has more children,
            // or there are no more parents (index=0).
            for (int index = viewPathArray.Length - 1; index >= 0; index--)
            {
                bool isViewRemoved = viewPathArray[index].RemoveView(currentView);
                removedViews.Add(currentView);

                if (!isViewRemoved)
                {
                    String message = "Failed to remove view " + currentView;
                    Log.Fatal(".remove " + message);
                    throw new IllegalStateException(message);
                }

                // If the parent view has more child views, we are done
                if (viewPathArray[index].HasViews)
                {
                    break;
                }

                // The parent of the top parent is the stream, remove from stream
                if (index == 0)
                {
                    parentViewable.RemoveView(viewPathArray[0]);
                    removedViews.Add(viewPathArray[0]);
                }
                else
                {
                    currentView = viewPathArray[index];
                }
            }

            return(removedViews);
        }
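The walk-up logic above is easier to see on a plain tree. A self-contained sketch with a hypothetical Node type (not the Viewable/View interfaces used here): remove the leaf, then keep removing parents that are left without children, stopping at the first parent that still has other children.

using System.Collections.Generic;

sealed class Node
{
    public Node Parent;
    public readonly List<Node> Children = new List<Node>();
}

static class LeafRemovalSketch
{
    public static IList<Node> RemoveLeaf(Node leaf)
    {
        var removed = new List<Node>();
        if (leaf.Children.Count != 0)
        {
            return removed;                      // only leaf nodes are removed
        }

        var current = leaf;
        while (current.Parent != null)
        {
            var parent = current.Parent;
            parent.Children.Remove(current);
            removed.Add(current);

            if (parent.Children.Count != 0)
            {
                break;                           // parent still has other children - done
            }
            current = parent;                    // parent became a leaf - keep walking up
        }
        return removed;
    }
}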
        private void PrepareCategoryPostCountModel(CategoryPostCountModel model, IList<CategoryPostCount> categories)
        {
            model = model ?? new CategoryPostCountModel();

            if (!categories.IsEmpty())
            {
                StringBuilder sbPie = new StringBuilder(@"[");
                StringBuilder sbBarCount = new StringBuilder(@"[");
                StringBuilder sbBarCat = new StringBuilder(@"[");
                foreach (CategoryPostCount m in categories)
                {
                    m.Category = HttpUtility.JavaScriptStringEncode(string.Format("{0} ({1})", m.Category, m.PostCount));

                    sbPie.AppendFormat(@"['{0}', {1}],", m.Category, m.PostCount);

                    sbBarCount.AppendFormat(@"{0}, ", m.PostCount);
                    sbBarCat.AppendFormat(@"'{0}', ", m.Category);
                }
                sbPie.Append(@"]");
                sbBarCount.Append(@"]");
                sbBarCat.Append(@"]");

                model.JsonPie = sbPie.ToString();
                model.JsonBarCount = sbBarCount.ToString();
                model.JsonBarCategory = sbBarCat.ToString();
            }
        }
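Illustrative output only (category names and counts are made up): for two categories, "Tech" with 5 posts and "News" with 3, the builders above produce roughly the following strings; note the trailing commas, which the charting code consuming these strings presumably tolerates.

//   model.JsonPie         -> "[['Tech (5)', 5],['News (3)', 3],]"
//   model.JsonBarCount    -> "[5, 3, ]"
//   model.JsonBarCategory -> "['Tech (5)', 'News (3)', ]"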
Beispiel #42
0
        /// <summary>
        /// Returns PackageSources if specified in the config file. Else returns the default sources specified in the constructor.
        /// If no default values were specified, returns an empty sequence.
        /// </summary>
        public IEnumerable <PackageSource> LoadPackageSources()
        {
            var sources                 = new HashSet <string>(StringComparer.OrdinalIgnoreCase);
            var settingsValue           = new List <SettingValue>();
            IList <SettingValue> values = _settingsManager.GetSettingValues(PackageSourcesSectionName, isPath: true);
            var machineWideSourcesCount = 0;

            if (!values.IsEmpty())
            {
                var machineWideSources = new List <SettingValue>();

                // remove duplicate sources. Pick the one with the highest priority.
                // note that Reverse() is needed because items in 'values' are in
                // ascending priority order.
                foreach (var settingValue in values.Reverse())
                {
                    if (!sources.Contains(settingValue.Key))
                    {
                        if (settingValue.IsMachineWide)
                        {
                            machineWideSources.Add(settingValue);
                        }
                        else
                        {
                            settingsValue.Add(settingValue);
                        }

                        sources.Add(settingValue.Key);
                    }
                }

                // Reverse the list to be backward compatible
                settingsValue.Reverse();
                machineWideSourcesCount = machineWideSources.Count;

                // Add machine wide sources at the end
                settingsValue.AddRange(machineWideSources);
            }

            var loadedPackageSources = new List <PackageSource>();

            if (!settingsValue.IsEmpty())
            {
                // put disabled package source names into the hash set

                IEnumerable <KeyValuePair <string, string> > disabledSourcesValues = _settingsManager.GetValues(DisabledPackageSourcesSectionName) ??
                                                                                     Enumerable.Empty <KeyValuePair <string, string> >();
                var disabledSources = new HashSet <string>(disabledSourcesValues.Select(s => s.Key), StringComparer.CurrentCultureIgnoreCase);
                loadedPackageSources = settingsValue.
                                       Select(p =>
                {
                    string name = p.Key;
                    string src  = p.Value;
                    PackageSourceCredential creds = ReadCredential(name);

                    return(new PackageSource(src, name, isEnabled: !disabledSources.Contains(name))
                    {
                        UserName = creds != null ? creds.Username : null,
                        Password = creds != null ? creds.Password : null,
                        IsPasswordClearText = creds != null && creds.IsPasswordClearText,
                        IsMachineWide = p.IsMachineWide
                    });
                }).ToList();

                if (_migratePackageSources != null)
                {
                    MigrateSources(loadedPackageSources);
                }
            }

            SetDefaultPackageSources(loadedPackageSources, machineWideSourcesCount);

            return(loadedPackageSources);
        }
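A condensed sketch of the de-duplication step above, using a hypothetical SourceEntry type rather than NuGet's SettingValue: keep only the highest-priority entry per key, preserve the original order of user-level sources, and append machine-wide sources at the end.

using System;
using System.Collections.Generic;
using System.Linq;

class SourceEntry
{
    public string Key;
    public bool IsMachineWide;
}

static class SourceDedupSketch
{
    public static List<SourceEntry> Dedup(IList<SourceEntry> valuesInAscendingPriority)
    {
        var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        var userSources = new List<SourceEntry>();
        var machineWide = new List<SourceEntry>();

        foreach (var value in valuesInAscendingPriority.Reverse())   // highest priority first
        {
            if (!seen.Add(value.Key))
            {
                continue;                                            // lower-priority duplicate
            }
            (value.IsMachineWide ? machineWide : userSources).Add(value);
        }

        userSources.Reverse();              // restore the original (backward-compatible) order
        userSources.AddRange(machineWide);  // machine-wide sources go last
        return userSources;
    }
}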
        /// <summary>
        /// The stiffness matrix which relates to known forces and unknown displacements
        /// </summary>
        /// <returns>A stiffness matrix for known forces and unknown displacements</returns>
        public StiffnessMatrix BuildKnownForcesUnknownDisplacementStiffnessMatrix()
        {
            IList <NodalDegreeOfFreedom> knownForceIdentifiers = this.DegreesOfFreedomWithKnownForce;

            Guard.AgainstInvalidState(() => { return(knownForceIdentifiers == null || knownForceIdentifiers.IsEmpty()); },
                                      "The model has too many constraints and no displacements will occur.  The reactions of each node equals the forces applied to each node.");

            IList <NodalDegreeOfFreedom> unknownDisplacementIdentifiers = this.DegreesOfFreedomWithUnknownDisplacement;

            return(this.BuildStiffnessSubMatrix(knownForceIdentifiers, unknownDisplacementIdentifiers));
        }
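For context, this corresponds to the usual partitioning of the assembled stiffness system (standard finite-element notation, not taken verbatim from this library):

\begin{bmatrix} F_{known} \\ F_{reactions} \end{bmatrix} = \begin{bmatrix} K_{11} & K_{12} \\ K_{21} & K_{22} \end{bmatrix} \begin{bmatrix} d_{unknown} \\ d_{prescribed} \end{bmatrix}, \qquad F_{known} = K_{11}\, d_{unknown} + K_{12}\, d_{prescribed}

The method above builds the K_{11} block: its rows are indexed by the degrees of freedom with known forces and its columns by those with unknown displacements.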
Beispiel #44
0
 /// <summary>
 /// Checks that all available migrations with numbers lower than the current version have been applied
 /// </summary>
 /// <param name="availableMigrations">Available migrations</param>
 /// <param name="appliedVersions">Applied migration versions</param>
 public static void CheckMigrationNumbers(IList<long> availableMigrations, IList<long> appliedVersions)
 {
     long current = appliedVersions.IsEmpty() ? 0 : appliedVersions.Max();
     var skippedMigrations = availableMigrations.Where(m => m <= current && !appliedVersions.Contains(m));
     Require.AreEqual(skippedMigrations.Count(), 0, "The current database version is {0}; the migrations {1} are available but have not been applied".FormatWith(current, skippedMigrations.ToCommaSeparatedString()));
 }
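An illustrative call with made-up version numbers: the current version is 3 (the highest applied), so migration 2 is below the current version yet never applied and counts as skipped, making the Require.AreEqual check fail; migration 5 lies above the current version and is ignored.

 // Illustrative only - the version numbers are invented:
 CheckMigrationNumbers(
     availableMigrations: new List<long> { 1, 2, 3, 5 },
     appliedVersions:     new List<long> { 1, 3 });
 // current = 3, skippedMigrations = { 2 } -> the check reports migration 2 as available but not applied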
        private bool ScavengeChunks(bool alwaysKeepScavenged, IList<TFChunk.TFChunk> oldChunks)
        {
            if (oldChunks.IsEmpty()) throw new ArgumentException("Provided list of chunks to scavenge and merge is empty.");
            
            var sw = Stopwatch.StartNew();

            int chunkStartNumber = oldChunks.First().ChunkHeader.ChunkStartNumber;
            long chunkStartPosition = oldChunks.First().ChunkHeader.ChunkStartPosition;
            int chunkEndNumber = oldChunks.Last().ChunkHeader.ChunkEndNumber;

            var tmpChunkPath = Path.Combine(_db.Config.Path, Guid.NewGuid() + ".scavenge.tmp");
            var oldChunksLogList = string.Join("\n",
                                               oldChunks.Select(x => string.Format("#{0}-{1} ({2})",
                                                                                   x.ChunkHeader.ChunkStartNumber,
                                                                                   x.ChunkHeader.ChunkEndNumber,
                                                                                   Path.GetFileName(x.FileName))));
            Log.Trace("SCAVENGING: started to scavenge & merge chunks: {0}\nResulting temp chunk file: {1}.",
                      oldChunksLogList, Path.GetFileName(tmpChunkPath));

            TFChunk.TFChunk newChunk;
            try
            {
                newChunk = TFChunk.TFChunk.CreateNew(tmpChunkPath, _db.Config.ChunkSize, chunkStartNumber, chunkEndNumber, isScavenged: true);
            }
            catch (IOException exc)
            {
                Log.ErrorException(exc, "IOException during creating new chunk for scavenging purposes. Ignoring...");
                return false;
            }

            try
            {
                var commits = new Dictionary<long, CommitInfo>();

                foreach (var oldChunk in oldChunks)
                {
                    TraverseChunk(oldChunk,
                                  prepare => { /* NOOP */ },
                                  commit =>
                                  {
                                      if (commit.TransactionPosition >= chunkStartPosition)
                                          commits.Add(commit.TransactionPosition, new CommitInfo(commit));
                                  },
                                  system => { /* NOOP */ });
                }

                var positionMapping = new List<PosMap>();
                foreach (var oldChunk in oldChunks)
                {
                    TraverseChunk(oldChunk,
                                  prepare => 
                                  {
                                      if (ShouldKeepPrepare(prepare, commits))
                                          positionMapping.Add(WriteRecord(newChunk, prepare));
                                  },
                                  commit =>
                                  {
                                      if (ShouldKeepCommit(commit, commits))
                                          positionMapping.Add(WriteRecord(newChunk, commit));
                                  },
                                  // we always keep system log records for now
                                  system => positionMapping.Add(WriteRecord(newChunk, system)));
                }
                var oldSize = oldChunks.Sum(x => (long)x.PhysicalDataSize + x.ChunkFooter.MapSize + ChunkHeader.Size + ChunkFooter.Size);
                var newSize = (long)newChunk.PhysicalDataSize + PosMap.FullSize * positionMapping.Count + ChunkHeader.Size + ChunkFooter.Size;

                if (!alwaysKeepScavenged && oldSize <= newSize)
                {
                    Log.Trace("Scavenging of chunks:\n{0}\n"
                              + "completed in {1}.\n"
                              + "Old chunks' versions are kept as they are smaller.\n"
                              + "Old chunk total size: {2}, scavenged chunk size: {3}.\n"
                              + "Scavenged chunk removed.",
                              oldChunksLogList, sw.Elapsed, oldSize, newSize);

                    newChunk.MarkForDeletion();
                    return false;
                }
                else
                {
                    newChunk.CompleteScavenge(positionMapping);
                    var chunk = _db.Manager.SwitchChunk(newChunk, verifyHash: false, removeChunksWithGreaterNumbers: false);
                    if (chunk != null)
                    {
                        Log.Trace("Scavenging of chunks:\n{0}\n"
                                  + "completed in {1}.\n"
                                  + "New chunk: {2} --> #{3}-{4} ({5}).\n"
                                  + "Old chunks total size: {6}, scavenged chunk size: {7}.",
                                  oldChunksLogList, sw.Elapsed,
                                  Path.GetFileName(tmpChunkPath), chunkStartNumber, chunkEndNumber, Path.GetFileName(chunk.FileName),
                                  oldSize, newSize);
                        return true;
                    }
                    else
                    {
                        Log.Trace("Scavenging of chunks:\n{0}\n"
                                  + "completed in {1}.\n"
                                  + "But switching was prevented for new chunk: #{2}-{3} ({4}).\n"
                                  + "Old chunks total size: {5}, scavenged chunk size: {6}.",
                                  oldChunksLogList, sw.Elapsed,
                                  chunkStartNumber, chunkEndNumber, Path.GetFileName(tmpChunkPath),
                                  oldSize, newSize);
                        return false;
                    }
                }
            }
            catch (FileBeingDeletedException exc)
            {
                Log.Info("Got FileBeingDeletedException exception during scavenging, that probably means some chunks were re-replicated.\n"
                         + "Scavenging of following chunks will be skipped:\n{0}\n"
                         + "Stopping scavenging and removing temp chunk '{1}'...\n"
                         + "Exception message: {2}.", oldChunksLogList, tmpChunkPath, exc.Message);
                Helper.EatException(() =>
                {
                    File.SetAttributes(tmpChunkPath, FileAttributes.Normal);
                    File.Delete(tmpChunkPath);
                });
                return false;
            }
        }
Beispiel #46
0
        /// <summary>
        /// Saves the nodes.
        /// </summary>
        /// <param name="sitemap">The sitemap.</param>
        /// <param name="nodeModels">The node models.</param>
        /// <param name="currentNodes">The existing nodes.</param>
        /// <param name="createdNodes">The created nodes.</param>
        /// <param name="updatedNodes">The sitemap nodes.</param>
        /// <param name="deletedNodes">The deleted nodes.</param>
        private void SaveNodes(Module.Pages.Models.Sitemap sitemap, IList<SaveSitemapNodeModel> nodeModels, List<SitemapNode> currentNodes, ref IList<SitemapNode> createdNodes, ref IList<SitemapNode> updatedNodes, ref IList<SitemapNode> deletedNodes)
        {
            var removeAll = nodeModels.IsEmpty();

            foreach (var existingNode in currentNodes)
            {
                if (removeAll || !NodeExist(nodeModels, existingNode.Id))
                {
                    repository.Delete(existingNode);
                    deletedNodes.Add(existingNode);
                }
            }

            if (removeAll)
            {
                return;
            }

            SaveChildNodes(sitemap, null, nodeModels, currentNodes, ref createdNodes, ref updatedNodes, ref deletedNodes);
        }
Beispiel #47
0
        /// <summary>
        /// Nodes the exist.
        /// </summary>
        /// <param name="updatedNodes">The updated nodes.</param>
        /// <param name="id">The identifier.</param>
        /// <returns>
        ///   <c>true</c> if node exists; <c>false</c> otherwise.
        /// </returns>
        private bool NodeExist(IList<SaveSitemapNodeModel> updatedNodes, Guid id)
        {
            if (updatedNodes == null || updatedNodes.IsEmpty())
            {
                return false;
            }

            foreach (var node in updatedNodes)
            {
                if (node.Id == id || NodeExist(node.Nodes, id))
                {
                    return true;
                }
            }

            return false;
        }
        private void PrepareUsersGenderCountModel(UsersGenderCountModel model, IList<UsersGenderCount> genders)
        {
            model = model ?? new UsersGenderCountModel();

            if (!genders.IsEmpty())
            {
                StringBuilder sbPie = new StringBuilder(@"[");

                foreach (UsersGenderCount g in genders)
                {
                    g.Gender = HttpUtility.JavaScriptStringEncode(string.Format("{0} ({1})", g.Gender, g.Count));

                    sbPie.AppendFormat(@"['{0}', {1}],", g.Gender, g.Count);
                }

                sbPie.Append(@"]");

                model.JsonPie = sbPie.ToString();
            }
        }
        private void PrepareNewsFromRegionModel(NewsFromRegionModel model, IList<String> regions)
        {
            model = model ?? new NewsFromRegionModel();

            if (!regions.IsEmpty())
            {
                model.AllRegions = regions.Select(x =>
                {
                    var m = new SelectListItem()
                    {
                        Text = x,
                        Value = x,
                        Selected = !String.IsNullOrEmpty(model.SelectedRegion) && model.SelectedRegion == x
                    };

                    return m;
                });
            }
        }
        private void PrepareGoogleMapsLocationsModel(GoogleMapsLocationsModel model, IList<LocationInfo> locations)
        {
            model = model ?? new GoogleMapsLocationsModel();

            if (!locations.IsEmpty())
            {
                // filter only locations with coordinates
                locations = locations.Where(x => x.Longitude.HasValue && x.Latitude.HasValue).ToList();

                foreach (LocationInfo li in locations)
                {
                    li.Name = HttpUtility.JavaScriptStringEncode(li.Name);
                    li.Description = HttpUtility.JavaScriptStringEncode(li.Description);
                }

                model.Locations = locations;
            }
        }
Beispiel #51
0
 protected override bool IsAvailableWithAttributeInstances(IList <IAttributeInstance> existingAttributes)
 {
     return(!existingAttributes.IsEmpty());
 }
Beispiel #52
0
 private static IList<string> EnsureListType(IList<string> list)
 {
     if (list == null)
     {
         return null;
     }
     if (list.IsEmpty())
     {
         return new List<string>(0);
     }
     if (list is List<string>)
     {
         return list;
     }
     return new List<string>(list);
 }
        private bool ScavengeChunks(bool alwaysKeepScavenged, IList<TFChunk.TFChunk> oldChunks, CancellationToken ct)
        {

            if (oldChunks.IsEmpty()) throw new ArgumentException("Provided list of chunks to scavenge and merge is empty.");

            var sw = Stopwatch.StartNew();

            int chunkStartNumber = oldChunks.First().ChunkHeader.ChunkStartNumber;
            long chunkStartPos = oldChunks.First().ChunkHeader.ChunkStartPosition;
            int chunkEndNumber = oldChunks.Last().ChunkHeader.ChunkEndNumber;
            long chunkEndPos = oldChunks.Last().ChunkHeader.ChunkEndPosition;

            var tmpChunkPath = Path.Combine(_db.Config.Path, Guid.NewGuid() + ".scavenge.tmp");
            var oldChunksList = string.Join("\n", oldChunks);
            Log.Trace("SCAVENGING: started to scavenge & merge chunks: {0}", oldChunksList);
            Log.Trace("Resulting temp chunk file: {0}.", Path.GetFileName(tmpChunkPath));

            TFChunk.TFChunk newChunk;
            try
            {
                newChunk = TFChunk.TFChunk.CreateNew(tmpChunkPath,
                                                     _db.Config.ChunkSize,
                                                     chunkStartNumber,
                                                     chunkEndNumber,
                                                     isScavenged: true,
                                                     inMem: _db.Config.InMemDb,
                                                     unbuffered: _db.Config.Unbuffered,
                                                     writethrough: _db.Config.WriteThrough,
                                                     initialReaderCount: _db.Config.InitialReaderCount,
                                                     reduceFileCachePressure: _db.Config.ReduceFileCachePressure);
            }
            catch (IOException exc)
            {
                Log.ErrorException(exc, "IOException during creating new chunk for scavenging purposes. Stopping scavenging process...");
                return false;
            }

            try
            {
                var commits = new Dictionary<long, CommitInfo>();

                foreach (var oldChunk in oldChunks)
                {
                    TraverseChunk(oldChunk,
                        (prepare, _) =>
                        {
                            ct.ThrowIfCancellationRequested();

                            /* NOOP */
                        },
                        (commit, _) =>
                        {

                            ct.ThrowIfCancellationRequested();

                            if (commit.TransactionPosition >= chunkStartPos)
                                commits.Add(commit.TransactionPosition, new CommitInfo(commit));
                        },
                        (system, _) =>
                        {
                            ct.ThrowIfCancellationRequested();

                            /* NOOP */
                        });
                }

                long newSize = 0;
                int positionMapCount = 0;

                foreach (var oldChunk in oldChunks)
                {
                    ct.ThrowIfCancellationRequested();

                    TraverseChunk(oldChunk,
                        (prepare, len) =>
                        {
                            ct.ThrowIfCancellationRequested();

                            if (ShouldKeepPrepare(prepare, commits, chunkStartPos, chunkEndPos))
                            {
                                newSize += len + 2 * sizeof(int);
                                positionMapCount++;
                            }
                        },
                        (commit, len) =>
                        {
                            ct.ThrowIfCancellationRequested();

                            if (ShouldKeepCommit(commit, commits))
                            {
                                newSize += len + 2 * sizeof(int);
                                positionMapCount++;
                            }
                        },
                        (system, len) =>
                        {
                            ct.ThrowIfCancellationRequested();

                            newSize += len + 2 * sizeof(int);
                            positionMapCount++;
                        });
                }

                newSize += positionMapCount * PosMap.FullSize + ChunkHeader.Size + ChunkFooter.Size;
                if (newChunk.ChunkHeader.Version >= (byte) TFChunk.TFChunk.ChunkVersions.Aligned)
                    newSize = TFChunk.TFChunk.GetAlignedSize((int) newSize);

                var oldVersion = oldChunks.Any(x => x.ChunkHeader.Version != 3);
                var oldSize = oldChunks.Sum(x => (long) x.FileSize);

                if (oldSize <= newSize && !alwaysKeepScavenged && !_unsafeIgnoreHardDeletes && !oldVersion)
                {
                    Log.Trace("Scavenging of chunks:");
                    Log.Trace(oldChunksList);
                    Log.Trace("completed in {0}.", sw.Elapsed);
                    Log.Trace("Old chunks' versions are kept as they are smaller.");
                    Log.Trace("Old chunk total size: {0}, scavenged chunk size: {1}.", oldSize, newSize);
                    Log.Trace("Scavenged chunk removed.");

                    newChunk.MarkForDeletion();
                    _scavengerLog.ChunksNotScavenged(chunkStartNumber, chunkEndNumber, sw.Elapsed, "");

                    return false;
                }

                var positionMapping = new List<PosMap>();
                foreach (var oldChunk in oldChunks)
                {
                    TraverseChunk(oldChunk,
                        (prepare, _) =>
                        {
                            ct.ThrowIfCancellationRequested();

                            if (ShouldKeepPrepare(prepare, commits, chunkStartPos, chunkEndPos))
                                positionMapping.Add(WriteRecord(newChunk, prepare));
                        },
                        (commit, _) =>
                        {
                            ct.ThrowIfCancellationRequested();

                            if (ShouldKeepCommit(commit, commits))
                                positionMapping.Add(WriteRecord(newChunk, commit));
                        },
                        // we always keep system log records for now
                        (system, _) =>
                        {
                            ct.ThrowIfCancellationRequested();

                            positionMapping.Add(WriteRecord(newChunk, system));
                        });
                }

                newChunk.CompleteScavenge(positionMapping);

                if (_unsafeIgnoreHardDeletes)
                {
                    Log.Trace("Forcing scavenge chunk to be kept even if bigger.");
                }

                if (oldVersion)
                {
                    Log.Trace("Forcing scavenged chunk to be kept as old chunk is a previous version.");
                }

                var chunk = _db.Manager.SwitchChunk(newChunk, verifyHash: false, removeChunksWithGreaterNumbers: false);
                if (chunk != null)
                {
                    Log.Trace("Scavenging of chunks:");
                    Log.Trace(oldChunksList);
                    Log.Trace("completed in {0}.", sw.Elapsed);
                    Log.Trace("New chunk: {0} --> #{1}-{2} ({3}).", Path.GetFileName(tmpChunkPath), chunkStartNumber,
                        chunkEndNumber, Path.GetFileName(chunk.FileName));
                    Log.Trace("Old chunks total size: {0}, scavenged chunk size: {1}.", oldSize, newSize);
                    var spaceSaved = oldSize - newSize;
                    _scavengerLog.ChunksScavenged(chunkStartNumber, chunkEndNumber, sw.Elapsed, spaceSaved);

                    return true;
                }
                else
                {
                    Log.Trace("Scavenging of chunks:");
                    Log.Trace("{0}", oldChunksList);
                    Log.Trace("completed in {1}.", sw.Elapsed);
                    Log.Trace("But switching was prevented for new chunk: #{0}-{1} ({2}).", chunkStartNumber,
                        chunkEndNumber, Path.GetFileName(tmpChunkPath));
                    Log.Trace("Old chunks total size: {0}, scavenged chunk size: {1}.", oldSize, newSize);
                    _scavengerLog.ChunksNotScavenged(chunkStartNumber, chunkEndNumber, sw.Elapsed, "Chunk switch prevented.");

                    return false;
                }
            }
            catch (FileBeingDeletedException exc)
            {
                Log.Info(
                    "Got FileBeingDeletedException exception during scavenging, that probably means some chunks were re-replicated.");
                Log.Info("Scavenging of following chunks will be skipped:");
                Log.Info("{0}", oldChunksList);
                Log.Info("Stopping scavenging and removing temp chunk '{0}'...", tmpChunkPath);
                Log.Info("Exception message: {0}.", exc.Message);
                DeleteTempChunk(tmpChunkPath, MaxRetryCount);
                _scavengerLog.ChunksNotScavenged(chunkStartNumber, chunkEndNumber, sw.Elapsed, exc.Message);

                return false;
            }
            catch (OperationCanceledException)
            {
                Log.Info("Scavenging cancelled at:");
                Log.Info("{0}", oldChunksList);
                newChunk.MarkForDeletion();                
                _scavengerLog.ChunksNotScavenged(chunkStartNumber, chunkEndNumber, sw.Elapsed, "Scavenge cancelled");
                return false;
            }
            catch (Exception ex)
            {
                Log.Info("Got exception while scavenging chunk: #{0}-{1}. This chunk will be skipped\n"
                         + "Exception: {2}.", chunkStartNumber, chunkEndNumber, ex.ToString());
                DeleteTempChunk(tmpChunkPath, MaxRetryCount);
                _scavengerLog.ChunksNotScavenged(chunkStartNumber, chunkEndNumber, sw.Elapsed, ex.Message);

                return false;
            }
        }
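The first traversal above only estimates the scavenged size, so records are not written twice when scavenging would not save space. A condensed sketch of the keep-or-switch decision it feeds (names are illustrative, not EventStore's API):

static bool ShouldSwitchToScavengedChunk(long oldSize, long estimatedNewSize,
                                         bool alwaysKeepScavenged, bool unsafeIgnoreHardDeletes, bool oldChunkVersion)
{
    // forced keeps: explicit flag, unsafe hard-delete mode, or an old on-disk chunk version
    if (alwaysKeepScavenged || unsafeIgnoreHardDeletes || oldChunkVersion)
    {
        return true;
    }
    return estimatedNewSize < oldSize;   // otherwise switch only when space is actually saved
}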
Beispiel #54
0
		/// <exception cref="System.IO.IOException"></exception>
		private ObjectId ProcessHaveLines(IList<ObjectId> peerHas, ObjectId last)
		{
			preUploadHook.OnBeginNegotiateRound(this, wantIds, peerHas.Count);
			if (peerHas.IsEmpty())
			{
				return last;
			}
			IList<ObjectId> toParse = peerHas;
			HashSet<ObjectId> peerHasSet = null;
			bool needMissing = false;
			sentReady = false;
			if (wantAll.IsEmpty() && !wantIds.IsEmpty())
			{
				// We have not yet parsed the want list. Parse it now.
				peerHasSet = new HashSet<ObjectId>(peerHas);
				int cnt = wantIds.Count + peerHasSet.Count;
				toParse = new AList<ObjectId>(cnt);
				Sharpen.Collections.AddAll(toParse, wantIds);
				Sharpen.Collections.AddAll(toParse, peerHasSet);
				needMissing = true;
			}
			ICollection<RevObject> notAdvertisedWants = null;
			int haveCnt = 0;
			AsyncRevObjectQueue q = walk.ParseAny(toParse.AsIterable(), needMissing);
			try
			{
				for (; ; )
				{
					RevObject obj;
					try
					{
						obj = q.Next();
					}
					catch (MissingObjectException notFound)
					{
						ObjectId id = notFound.GetObjectId();
						if (wantIds.Contains(id))
						{
							string msg = MessageFormat.Format(JGitText.Get().wantNotValid, id.Name);
							throw new PackProtocolException(msg, notFound);
						}
						continue;
					}
					if (obj == null)
					{
						break;
					}
					// If the object is still found in wantIds, the want
					// list wasn't parsed earlier, and was done in this batch.
					//
					if (wantIds.Remove(obj))
					{
						if (!advertised.Contains(obj) && requestPolicy != UploadPack.RequestPolicy.ANY)
						{
							if (notAdvertisedWants == null)
							{
								notAdvertisedWants = new HashSet<RevObject>();
							}
							notAdvertisedWants.AddItem(obj);
						}
						if (!obj.Has(WANT))
						{
							obj.Add(WANT);
							wantAll.AddItem(obj);
						}
						if (!(obj is RevCommit))
						{
							obj.Add(SATISFIED);
						}
						if (obj is RevTag)
						{
							RevObject target = walk.Peel(obj);
							if (target is RevCommit)
							{
								if (!target.Has(WANT))
								{
									target.Add(WANT);
									wantAll.AddItem(target);
								}
							}
						}
						if (!peerHasSet.Contains(obj))
						{
							continue;
						}
					}
					last = obj;
					haveCnt++;
					if (obj is RevCommit)
					{
						RevCommit c = (RevCommit)obj;
						if (oldestTime == 0 || c.CommitTime < oldestTime)
						{
							oldestTime = c.CommitTime;
						}
					}
					if (obj.Has(PEER_HAS))
					{
						continue;
					}
					obj.Add(PEER_HAS);
					if (obj is RevCommit)
					{
						((RevCommit)obj).Carry(PEER_HAS);
					}
					AddCommonBase(obj);
					switch (multiAck)
					{
						case BasePackFetchConnection.MultiAck.OFF:
						{
							// If both sides have the same object; let the client know.
							//
							if (commonBase.Count == 1)
							{
								pckOut.WriteString("ACK " + obj.Name + "\n");
							}
							break;
						}

						case BasePackFetchConnection.MultiAck.CONTINUE:
						{
							pckOut.WriteString("ACK " + obj.Name + " continue\n");
							break;
						}

						case BasePackFetchConnection.MultiAck.DETAILED:
						{
							pckOut.WriteString("ACK " + obj.Name + " common\n");
							break;
						}
					}
				}
			}
			finally
			{
				q.Release();
			}
			// If the client asked for non advertised object, check our policy.
			if (notAdvertisedWants != null && !notAdvertisedWants.IsEmpty())
			{
				switch (requestPolicy)
				{
					case UploadPack.RequestPolicy.ADVERTISED:
					default:
					{
						throw new PackProtocolException(MessageFormat.Format(JGitText.Get().wantNotValid, 
							notAdvertisedWants.Iterator().Next().Name));
					}

					case UploadPack.RequestPolicy.REACHABLE_COMMIT:
					{
						CheckNotAdvertisedWants(notAdvertisedWants);
						break;
					}

					case UploadPack.RequestPolicy.ANY:
					{
						// Allow whatever was asked for.
						break;
					}
				}
			}
			int missCnt = peerHas.Count - haveCnt;
			// If we don't have one of the objects but we're also willing to
			// create a pack at this point, let the client know so it stops
			// telling us about its history.
			//
			bool didOkToGiveUp = false;
			if (0 < missCnt)
			{
				for (int i = peerHas.Count - 1; i >= 0; i--)
				{
					ObjectId id = peerHas[i];
					if (walk.LookupOrNull(id) == null)
					{
						didOkToGiveUp = true;
						if (OkToGiveUp())
						{
							switch (multiAck)
							{
								case BasePackFetchConnection.MultiAck.OFF:
								{
									break;
								}

								case BasePackFetchConnection.MultiAck.CONTINUE:
								{
									pckOut.WriteString("ACK " + id.Name + " continue\n");
									break;
								}

								case BasePackFetchConnection.MultiAck.DETAILED:
								{
									pckOut.WriteString("ACK " + id.Name + " ready\n");
									sentReady = true;
									break;
								}
							}
						}
						break;
					}
				}
			}
			if (multiAck == BasePackFetchConnection.MultiAck.DETAILED && !didOkToGiveUp && OkToGiveUp())
			{
				ObjectId id = peerHas[peerHas.Count - 1];
				sentReady = true;
				pckOut.WriteString("ACK " + id.Name + " ready\n");
				sentReady = true;
			}
			preUploadHook.OnEndNegotiateRound(this, wantAll, haveCnt, missCnt, sentReady);
			peerHas.Clear();
			return last;
		}
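Condensed from the switch statements above, this is what the server writes back for each "have" object it also holds, depending on the negotiated multi-ack capability (the literal strings appear in the code above):

		//   MultiAck.OFF       -> "ACK <id>\n"              (only once, for the first common base)
		//   MultiAck.CONTINUE  -> "ACK <id> continue\n"
		//   MultiAck.DETAILED  -> "ACK <id> common\n", plus "ACK <id> ready\n" once the server
		//                         could already build a pack (OkToGiveUp)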
Beispiel #55
0
        /// <summary>
        /// Flattens the vector of array pairs into a single pair of arrays. Returns a pair with null
        /// entries if an empty vector was passed; otherwise returns arrays containing all the events.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="updateVector">is a list of updates of old and new events</param>
        /// <returns>array with all events</returns>
        public static UniformPair <T[]> FlattenBatchStream <T>(IList <UniformPair <T[]> > updateVector)
        {
            if (updateVector.IsEmpty())
            {
                return(new UniformPair <T[]>(null, null));
            }

            if (updateVector.Count == 1)
            {
                return(new UniformPair <T[]>(updateVector[0].First, updateVector[0].Second));
            }

            int totalNewEvents = 0;
            int totalOldEvents = 0;

            foreach (var pair in updateVector)
            {
                if (pair.First != null)
                {
                    totalNewEvents += pair.First.Length;
                }
                if (pair.Second != null)
                {
                    totalOldEvents += pair.Second.Length;
                }
            }

            if ((totalNewEvents == 0) && (totalOldEvents == 0))
            {
                return(new UniformPair <T[]>(null, null));
            }

            T[] newEvents = null;
            T[] oldEvents = null;
            if (totalNewEvents != 0)
            {
                newEvents = new T[totalNewEvents];
            }
            if (totalOldEvents != 0)
            {
                oldEvents = new T[totalOldEvents];
            }

            int destPosNew = 0;
            int destPosOld = 0;

            foreach (var pair in updateVector)
            {
                T[] newData = pair.First;
                T[] oldData = pair.Second;

                if (newData != null)
                {
                    int newDataLen = newData.Length;
                    Array.Copy(newData, 0, newEvents, destPosNew, newDataLen);
                    destPosNew += newDataLen;
                }
                if (oldData != null)
                {
                    int oldDataLen = oldData.Length;
                    Array.Copy(oldData, 0, oldEvents, destPosOld, oldDataLen);
                    destPosOld += oldDataLen;
                }
            }

            return(new UniformPair <T[]>(newEvents, oldEvents));
        }
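A small usage sketch with made-up string events, assuming UniformPair<T> exposes a (first, second) constructor and First/Second properties as used above:

            var batches = new List<UniformPair<string[]>>
            {
                new UniformPair<string[]>(new[] { "n1", "n2" }, null),
                new UniformPair<string[]>(new[] { "n3" }, new[] { "o1" }),
                new UniformPair<string[]>(null, new[] { "o2", "o3" })
            };

            var flattened = FlattenBatchStream(batches);
            // flattened.First  -> { "n1", "n2", "n3" }
            // flattened.Second -> { "o1", "o2", "o3" }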
        private bool MergeChunks(IList <TFChunk.TFChunk> oldChunks, CancellationToken ct)
        {
            if (oldChunks.IsEmpty())
            {
                throw new ArgumentException("Provided list of chunks to merge is empty.");
            }

            var oldChunksList = string.Join("\n", oldChunks);

            if (oldChunks.Count < 2)
            {
                Log.Trace("SCAVENGING: Tried to merge less than 2 chunks, aborting: {oldChunksList}", oldChunksList);
                return(false);
            }

            var sw = Stopwatch.StartNew();

            int chunkStartNumber = oldChunks.First().ChunkHeader.ChunkStartNumber;
            int chunkEndNumber   = oldChunks.Last().ChunkHeader.ChunkEndNumber;

            var tmpChunkPath = Path.Combine(_db.Config.Path, Guid.NewGuid() + ".merge.scavenge.tmp");

            Log.Trace("SCAVENGING: started to merge chunks: {oldChunksList}"
                      + "\nResulting temp chunk file: {tmpChunkPath}.",
                      oldChunksList, Path.GetFileName(tmpChunkPath));

            TFChunk.TFChunk newChunk;
            try
            {
                newChunk = TFChunk.TFChunk.CreateNew(tmpChunkPath,
                                                     _db.Config.ChunkSize,
                                                     chunkStartNumber,
                                                     chunkEndNumber,
                                                     isScavenged: true,
                                                     inMem: _db.Config.InMemDb,
                                                     unbuffered: _db.Config.Unbuffered,
                                                     writethrough: _db.Config.WriteThrough,
                                                     initialReaderCount: _db.Config.InitialReaderCount,
                                                     reduceFileCachePressure: _db.Config.ReduceFileCachePressure);
            }
            catch (IOException exc)
            {
                Log.ErrorException(exc, "IOException during creating new chunk for scavenging merge purposes. Stopping scavenging merge process...");
                return(false);
            }

            try
            {
                var oldVersion = oldChunks.Any(x => x.ChunkHeader.Version != TFChunk.TFChunk.CurrentChunkVersion);

                var positionMapping = new List <PosMap>();
                foreach (var oldChunk in oldChunks)
                {
                    TraverseChunkBasic(oldChunk, ct,
                                       result => positionMapping.Add(WriteRecord(newChunk, result.LogRecord)));
                }

                newChunk.CompleteScavenge(positionMapping);

                if (_unsafeIgnoreHardDeletes)
                {
                    Log.Trace("Forcing merged chunk to be kept even if bigger.");
                }

                if (oldVersion)
                {
                    Log.Trace("Forcing merged chunk to be kept as old chunk is a previous version.");
                }

                var chunk = _db.Manager.SwitchChunk(newChunk, verifyHash: false, removeChunksWithGreaterNumbers: false);
                if (chunk != null)
                {
                    Log.Trace(
                        "Merging of chunks:"
                        + "\n{oldChunksList}"
                        + "\ncompleted in {elapsed}."
                        + "\nNew chunk: {tmpChunkPath} --> #{chunkStartNumber}-{chunkEndNumber} ({newChunk}).",
                        oldChunksList, sw.Elapsed, Path.GetFileName(tmpChunkPath), chunkStartNumber, chunkEndNumber, Path.GetFileName(chunk.FileName));
                    var spaceSaved = oldChunks.Sum(_ => _.FileSize) - newChunk.FileSize;
                    _scavengerLog.ChunksMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, spaceSaved);
                    return(true);
                }
                else
                {
                    Log.Trace(
                        "Merging of chunks:"
                        + "\n{oldChunksList}"
                        + "\ncompleted in {elapsed}."
                        + "\nBut switching was prevented for new chunk: #{chunkStartNumber}-{chunkEndNumber} ({tmpChunkPath}).",
                        oldChunksList, sw.Elapsed, chunkStartNumber, chunkEndNumber, Path.GetFileName(tmpChunkPath));
                    _scavengerLog.ChunksNotMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, "Chunk switch prevented.");
                    return(false);
                }
            }
            catch (FileBeingDeletedException exc)
            {
                Log.Info("Got FileBeingDeletedException exception during scavenge merging, that probably means some chunks were re-replicated."
                         + "\nMerging of following chunks will be skipped:"
                         + "\n{oldChunksList}"
                         + "\nStopping merging and removing temp chunk '{tmpChunkPath}'..."
                         + "\nException message: {e}.",
                         oldChunksList, tmpChunkPath, exc.Message);
                newChunk.Dispose();
                DeleteTempChunk(tmpChunkPath, MaxRetryCount);
                _scavengerLog.ChunksNotMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, exc.Message);
                 return false;
            }
            catch (OperationCanceledException)
            {
                Log.Info("Scavenging cancelled at:"
                         + "\n{oldChunksList}",
                         oldChunksList);
                newChunk.MarkForDeletion();
                _scavengerLog.ChunksNotMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, "Scavenge cancelled");
                 return false;
            }
            catch (Exception ex)
            {
                Log.Info("Got exception while merging chunk:"
                         + "\n{oldChunks}"
                         + "\nException: {e}",
                         oldChunks, ex.ToString()
                         );
                newChunk.Dispose();
                DeleteTempChunk(tmpChunkPath, MaxRetryCount);
                _scavengerLog.ChunksNotMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, ex.Message);
                 return false;
            }
        }
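The merge above follows a write-aside-then-switch shape: records are copied into a temporary chunk, the chunk manager atomically switches it in, and on any failure the new chunk is disposed and the temp file deleted. As a rough, self-contained illustration of that shape using plain System.IO (TempFileSwapSketch and TryRewrite are invented names for this sketch, not EventStore's API):

    using System;
    using System.IO;

    static class TempFileSwapSketch
    {
        // Build the replacement off to the side, switch it in only on success,
        // and always remove the temporary file (the DeleteTempChunk idea above).
        public static bool TryRewrite(string targetPath, byte[] newContent)
        {
            var tmpPath = targetPath + ".tmp";
            try
            {
                File.WriteAllBytes(tmpPath, newContent);
                File.Copy(tmpPath, targetPath, overwrite: true); // the "switch" step; EventStore swaps chunks via its chunk manager instead
                return true;
            }
            catch (IOException ex)
            {
                Console.Error.WriteLine("Rewrite failed, keeping the original file: " + ex.Message);
                return false;
            }
            finally
            {
                if (File.Exists(tmpPath))
                    File.Delete(tmpPath);
            }
        }
    }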
Beispiel #57
0
 protected virtual bool IsAvailableWithAttributeInstances([NotNull] IList<IAttributeInstance> existingAttributes)
 {
     return existingAttributes.IsEmpty();
 }
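IsEmpty() is not a BCL member on IList<T>; each project quoted on this page brings its own extension method. A minimal sketch of what such an extension usually looks like (the actual implementations in these codebases may differ):

    using System.Collections.Generic;
    using System.Linq;

    static class CollectionExtensions
    {
        // O(1) check for collections that expose Count.
        public static bool IsEmpty<T>(this ICollection<T> collection) => collection.Count == 0;

        // Fallback for arbitrary sequences; Any() stops after the first element.
        public static bool IsEmpty<T>(this IEnumerable<T> source) => !source.Any();
    }

With such an extension in scope, the override above simply states that the action is available only when no attribute instances are present yet.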
Beispiel #58
0
 internal JsExpression InitializersToJson(IList<ResolveResult> initializerStatements, IType type)
 {
     //if (Sk.IsNativeArrayEnumerator(type.GetDefinition()))
     //FIX for issue 325:
     if (Sk.IsNativeArrayEnumerator(type.GetDefinition()) && (initializerStatements.IsEmpty() || initializerStatements[0] is CSharpInvocationResolveResult))
     {
          var items  = initializerStatements.Cast<CSharpInvocationResolveResult>().Select(t => t.Arguments[0]).ToList();
          var items2 = VisitExpressions(items);
          var arr    = Js.NewJsonArray(items2.ToArray());
          return arr;
     }
     else
     {
         var json = Js.Json();
         foreach (var st in initializerStatements)
         {
             if (st is OperatorResolveResult)
             {
                 var op    = (OperatorResolveResult)st;
                 var mrr   = (MemberResolveResult)op.Operands[0];
                 var name  = SkJs.GetEntityJsName(mrr.Member);
                 var value = VisitExpression(op.Operands[1]);
                 var pair  = Js.JsonNameValue(name, value);
                 if (mrr.TargetResult is MemberResolveResult)   //happens when using object initializers to set inner properties, e.g. new Parent { Child = { Name="ggg" } }
                 {
                     var targetMrr = (MemberResolveResult)mrr.TargetResult;
                     var name2     = SkJs.GetEntityJsName(targetMrr.Member);
                     var innerJson = Js.Json();
                     innerJson.Add(pair);
                     pair = Js.JsonNameValue(name2, innerJson);
                 }
                 json.Add(pair);
             }
             else if (st is InvocationResolveResult)
             {
                 var irr       = (InvocationResolveResult)st;
                 var targetMrr = irr.TargetResult as MemberResolveResult;
                 if (targetMrr == null)
                 {
                     throw new CompilerException(st.GetFirstNode(), "Expected MemberResolveResult");
                 }
                 var name = SkJs.GetEntityJsName(targetMrr.Member);
                 if (irr.Arguments.Count != 1)
                 {
                     throw new CompilerException(st.GetFirstNode(), "Expected one argument, not " + name + " " + irr.Arguments.Count);
                 }
                 var value      = VisitExpression(irr.Arguments[0]);
                 var jsonMember = json.NamesValues.NotNull().FirstOrDefault(t => t.Name.Name == name);
                 if (jsonMember == null)
                 {
                     json.Add(name, Js.NewJsonArray(value));
                 }
                 else
                 {
                     var array = jsonMember.Value as JsJsonArrayExpression;
                     if (array == null)
                     {
                         throw new CompilerException(st.GetFirstNode(), "json member value array not found " + name);
                     }
                     array.Items.Add(value);
                 }
             }
             else
             {
                 throw new NotImplementedException();
             }
         }
         //var inits2 = initializerStatements.Select(t => Visit(t)).ToList();
         //var namesValues = inits2.Cast<JsBinaryExpression>().Select(t => Js.JsonNameValue(((JsMemberExpression)t.Left).Name, t.Right)).ToList();
         //var json = Js.Json();
         //json.NamesValues = namesValues;
          return json;
     }
 }
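The MemberResolveResult-target branch above covers nested object initializers, the case the inline comment mentions. A tiny self-contained example of that C# syntax and the nested JSON shape it is translated into (Parent and Child are invented types for illustration; the real property names come from SkJs.GetEntityJsName):

    using System;

    class Child  { public string Name { get; set; } = ""; }
    class Parent { public Child Child { get; } = new Child(); }

    static class NestedInitializerDemo
    {
        static void Main()
        {
            // 'Child = { ... }' does not assign Child; it sets members on the existing
            // instance. InitializersToJson maps this onto a nested object, roughly
            // { Child: { Name: "ggg" } }.
            var parent = new Parent { Child = { Name = "ggg" } };
            Console.WriteLine(parent.Child.Name); // ggg
        }
    }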
        public virtual ICounter<string> GetFeatures(Example example, IDictionary<int, CompressedFeatureVector> mentionFeatures, Compressor<string> compressor)
        {
            ICounter<string> features     = new ClassicCounter<string>();
            ICounter<string> pairFeatures = new ClassicCounter<string>();
            ICounter<string> features1    = new ClassicCounter<string>();
            ICounter<string> features2    = compressor.Uncompress(mentionFeatures[example.mentionId2]);

            if (!example.IsNewLink())
            {
                System.Diagnostics.Debug.Assert((!anaphoricityClassifier));
                pairFeatures = compressor.Uncompress(example.pairwiseFeatures);
                features1    = compressor.Uncompress(mentionFeatures[example.mentionId1]);
            }
            else
            {
                features2.IncrementCount("bias");
            }
            if (!disallowedPrefixes.IsEmpty())
            {
                features1    = FilterOut(features1, disallowedPrefixes);
                features2    = FilterOut(features2, disallowedPrefixes);
                pairFeatures = FilterOut(pairFeatures, disallowedPrefixes);
            }
            IList<string> ids1 = example.IsNewLink() ? new List<string>() : Identifiers(features1, example.mentionType1);
            IList<string> ids2 = Identifiers(features2, example.mentionType2);

            features.AddAll(pairFeatures);
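            // Conjoin the pairwise and per-mention features with the mention-type identifiers of both mentions.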
            foreach (string id1 in ids1)
            {
                foreach (string id2 in ids2)
                {
                    if (pairConjunctions.Contains(MetaFeatureExtractor.PairConjunction.First))
                    {
                        features.AddAll(GetConjunction(pairFeatures, "_m1=" + id1));
                    }
                    if (pairConjunctions.Contains(MetaFeatureExtractor.PairConjunction.Last))
                    {
                        features.AddAll(GetConjunction(pairFeatures, "_m2=" + id2));
                    }
                    if (pairConjunctions.Contains(MetaFeatureExtractor.PairConjunction.Both))
                    {
                        features.AddAll(GetConjunction(pairFeatures, "_ms=" + id1 + "_" + id2));
                    }
                    if (singleConjunctions.Contains(MetaFeatureExtractor.SingleConjunction.Index))
                    {
                        features.AddAll(GetConjunction(features1, "_1"));
                        features.AddAll(GetConjunction(features2, "_2"));
                    }
                    if (singleConjunctions.Contains(MetaFeatureExtractor.SingleConjunction.IndexCurrent))
                    {
                        features.AddAll(GetConjunction(features1, "_1" + "_m=" + id1));
                        features.AddAll(GetConjunction(features2, "_2" + "_m=" + id2));
                    }
                    if (singleConjunctions.Contains(MetaFeatureExtractor.SingleConjunction.IndexLast))
                    {
                        features.AddAll(GetConjunction(features1, "_1" + "_m2=" + id2));
                        features.AddAll(GetConjunction(features2, "_2" + "_m2=" + id2));
                    }
                    if (singleConjunctions.Contains(MetaFeatureExtractor.SingleConjunction.IndexOther))
                    {
                        features.AddAll(GetConjunction(features1, "_1" + "_m=" + id2));
                        features.AddAll(GetConjunction(features2, "_2" + "_m=" + id1));
                    }
                    if (singleConjunctions.Contains(MetaFeatureExtractor.SingleConjunction.IndexBoth))
                    {
                        features.AddAll(GetConjunction(features1, "_1" + "_ms=" + id1 + "_" + id2));
                        features.AddAll(GetConjunction(features2, "_2" + "_ms=" + id1 + "_" + id2));
                    }
                }
            }
            if (example.IsNewLink())
            {
                features.AddAll(features2);
                features.AddAll(GetConjunction(features2, "_m=" + ids2[0]));
                ICounter<string> newFeatures = new ClassicCounter<string>();
                foreach (KeyValuePair<string, double> e in features.EntrySet())
                {
                    newFeatures.IncrementCount(e.Key + "_NEW", e.Value);
                }
                features = newFeatures;
            }
            return features;
        }
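GetConjunction itself is not part of this snippet; judging from the call sites above it copies a feature counter while appending a suffix to every key, so a pair feature "f" becomes "f_m1=<id>". A rough sketch of that behaviour with a plain dictionary standing in for ICounter (an assumption about intent, not CoreNLP's actual implementation):

    using System.Collections.Generic;

    static class FeatureConjunctionSketch
    {
        // Copy 'features', appending 'suffix' to every key; the counts are unchanged.
        public static Dictionary<string, double> GetConjunction(
            IReadOnlyDictionary<string, double> features, string suffix)
        {
            var result = new Dictionary<string, double>(features.Count);
            foreach (var kv in features)
                result[kv.Key + suffix] = kv.Value;
            return result;
        }
    }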
Beispiel #60
0
		/// <exception cref="System.IO.IOException"></exception>
		private ObjectId ProcessHaveLines(IList<ObjectId> peerHas, ObjectId last)
		{
			if (peerHas.IsEmpty())
			{
				return last;
			}
			// If both sides have the same object; let the client know.
			//
			AsyncRevObjectQueue q = walk.ParseAny(peerHas.AsIterable(), false);
			try
			{
				for (; ; )
				{
					RevObject obj;
					try
					{
						obj = q.Next();
					}
					catch (MissingObjectException)
					{
						continue;
					}
					if (obj == null)
					{
						break;
					}
					last = obj;
					if (obj.Has(PEER_HAS))
					{
						continue;
					}
					obj.Add(PEER_HAS);
					if (obj is RevCommit)
					{
						((RevCommit)obj).Carry(PEER_HAS);
					}
					AddCommonBase(obj);
					switch (multiAck)
					{
						case BasePackFetchConnection.MultiAck.OFF:
						{
							if (commonBase.Count == 1)
							{
								pckOut.WriteString("ACK " + obj.Name + "\n");
							}
							break;
						}

						case BasePackFetchConnection.MultiAck.CONTINUE:
						{
							pckOut.WriteString("ACK " + obj.Name + " continue\n");
							break;
						}

						case BasePackFetchConnection.MultiAck.DETAILED:
						{
							pckOut.WriteString("ACK " + obj.Name + " common\n");
							break;
						}
					}
				}
			}
			finally
			{
				q.Release();
			}
			// If we don't have one of the objects but we're also willing to
			// create a pack at this point, let the client know so it stops
			// telling us about its history.
			//
			for (int i = peerHas.Count - 1; i >= 0; i--)
			{
				ObjectId id = peerHas[i];
				if (walk.LookupOrNull(id) == null)
				{
					if (OkToGiveUp())
					{
						switch (multiAck)
						{
							case BasePackFetchConnection.MultiAck.OFF:
							{
								break;
							}

							case BasePackFetchConnection.MultiAck.CONTINUE:
							{
								pckOut.WriteString("ACK " + id.Name + " continue\n");
								break;
							}

							case BasePackFetchConnection.MultiAck.DETAILED:
							{
								pckOut.WriteString("ACK " + id.Name + " ready\n");
								break;
							}
						}
					}
					break;
				}
			}
			peerHas.Clear();
			return last;
		}
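The negotiation above acknowledges each object both sides have with a mode-dependent line: a single "ACK <id>" in the OFF mode, "ACK <id> continue" for multi-ack, and "ACK <id> common" (or "ACK <id> ready" when giving up) in detailed mode. A small sketch of just that formatting, mirroring the strings written by ProcessHaveLines (the enum and helper are invented for this sketch, not NGit's types):

    enum MultiAckMode { Off, Continue, Detailed }

    static class AckLine
    {
        // ACK sent for an object both sides already have.
        public static string ForCommonObject(MultiAckMode mode, string objectName)
        {
            switch (mode)
            {
                case MultiAckMode.Continue: return "ACK " + objectName + " continue\n";
                case MultiAckMode.Detailed: return "ACK " + objectName + " common\n";
                default:                    return "ACK " + objectName + "\n"; // OFF: sent only once, for the first common base
            }
        }
    }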