/// <summary> /// Update all touches /// </summary> void UpdateTouches() { touchPressedThisFrame = false; for (int i = 0; i < UnityInput.touchCount; ++i) { Touch touch = UnityInput.GetTouch(i); // Find existing touch, or create new one TouchInfo existingTouch = m_Touches.FirstOrDefault(t => t.touchId == touch.fingerId); if (existingTouch == null) { existingTouch = new TouchInfo { touchId = touch.fingerId, startPosition = touch.position, currentPosition = touch.position, previousPosition = touch.position, startTime = Time.realtimeSinceStartup, startedOverUI = EventSystem.current.IsPointerOverGameObject(touch.fingerId) }; m_Touches.Add(existingTouch); // Sanity check Debug.Assert(touch.phase == TouchPhase.Began); } switch (touch.phase) { case TouchPhase.Began: touchPressedThisFrame = true; if (pressed != null) { pressed(existingTouch); } break; case TouchPhase.Moved: bool wasDrag = existingTouch.isDrag; UpdateMovingFinger(touch, existingTouch); // Is this a drag? existingTouch.isDrag = existingTouch.totalMovement >= dragThresholdTouch; if (existingTouch.isDrag) { if (existingTouch.isHold) { existingTouch.wasHold = existingTouch.isHold; existingTouch.isHold = false; } // Did it just start now? if (!wasDrag) { if (startedDrag != null) { startedDrag(existingTouch); } } if (dragged != null) { dragged(existingTouch); } if (existingTouch.delta.sqrMagnitude > flickThreshold * flickThreshold) { existingTouch.flickVelocity = (existingTouch.flickVelocity * (1 - k_FlickAccumulationFactor)) + (existingTouch.delta * k_FlickAccumulationFactor); } else { existingTouch.flickVelocity = Vector2.zero; } } else { UpdateHoldingFinger(existingTouch); } break; case TouchPhase.Canceled: case TouchPhase.Ended: // Could have moved a bit UpdateMovingFinger(touch, existingTouch); // Quick enough (with no drift) to be a tap? if (!existingTouch.isDrag && Time.realtimeSinceStartup - existingTouch.startTime < tapTime) { if (tapped != null) { tapped(existingTouch); } } if (released != null) { released(existingTouch); } // Remove from track list m_Touches.Remove(existingTouch); break; case TouchPhase.Stationary: UpdateMovingFinger(touch, existingTouch); UpdateHoldingFinger(existingTouch); existingTouch.flickVelocity = Vector2.zero; break; } } if (activeTouchCount >= 2 && (m_Touches[0].isDrag || m_Touches[1].isDrag)) { if (pinched != null) { pinched(new PinchInfo { touch1 = m_Touches[0], touch2 = m_Touches[1] }); } } }
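// Illustrative usage sketch (not from the original project): how a consumer might subscribe to the
// gesture callbacks fired by UpdateTouches above. The component name "TouchInput" and the public
// visibility of the pressed/tapped/dragged/pinched delegates are assumptions; only the callback
// argument types (TouchInfo / PinchInfo) are taken from the method itself.
using UnityEngine;

public class GestureLogger : MonoBehaviour
{
    [SerializeField] TouchInput touchInput; // hypothetical component that runs UpdateTouches

    void OnEnable()
    {
        touchInput.tapped += OnTapped;
        touchInput.dragged += OnDragged;
        touchInput.pinched += OnPinched;
    }

    void OnDisable()
    {
        touchInput.tapped -= OnTapped;
        touchInput.dragged -= OnDragged;
        touchInput.pinched -= OnPinched;
    }

    void OnTapped(TouchInfo touch)  { Debug.Log("Tap at " + touch.currentPosition); }
    void OnDragged(TouchInfo touch) { Debug.Log("Drag delta " + touch.delta); }
    void OnPinched(PinchInfo pinch) { Debug.Log("Pinch between the first two dragging touches"); }
}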
public static void WriteLine(String message) { Trace.WriteLine(message); }
private IEnumerable <Job2> CreateJobsFromWorkflows(IEnumerable <Job2> workflowJobs, bool returnParents) { // Jobs in this collection correspond to the ContainerParentJob objects. PSWorkflowJob objects // are children of these. var reconstructedParentJobs = new Dictionary <Guid, Job2>(); var jobs = new List <Job2>(); if (workflowJobs == null) { return(jobs); } // If a workflow instance has incomplete metadata, we do not create the job for it. foreach (var job in workflowJobs) { var wfjob = job as PSWorkflowJob; Debug.Assert(wfjob != null, "Job supplied must be of type PSWorkflowJob"); PSWorkflowInstance instance = wfjob.PSWorkflowInstance; Dbg.Assert(instance != null, "PSWorkflowInstance should be reconstructed before attempting to rehydrate job"); if (!instance.JobStateRetrieved || instance.PSWorkflowContext.JobMetadata == null || instance.PSWorkflowContext.JobMetadata.Count == 0) { continue; } object data; string name, command; Guid instanceId; if (!GetJobInfoFromMetadata(instance, out command, out name, out instanceId)) { continue; } if (!instance.PSWorkflowContext.JobMetadata.TryGetValue(Constants.JobMetadataParentInstanceId, out data)) { continue; } var parentInstanceId = (Guid)data; // If the parent job is needed, find or create it now so that the ID is sequentially lower. if (returnParents && !reconstructedParentJobs.ContainsKey(parentInstanceId)) { if (!instance.PSWorkflowContext.JobMetadata.TryGetValue(Constants.JobMetadataParentName, out data)) { continue; } var parentName = (string)data; if (!instance.PSWorkflowContext.JobMetadata.TryGetValue(Constants.JobMetadataParentCommand, out data)) { continue; } var parentCommand = (string)data; JobIdentifier parentId = RetrieveJobIdForReuse(parentInstanceId); ContainerParentJob parentJob = parentId != null ? new ContainerParentJob(parentCommand, parentName, parentId, AdapterTypeName) : new ContainerParentJob(parentCommand, parentName, parentInstanceId, AdapterTypeName); // update job metadata with new parent session Id--needed for filtering. // The pid in the metadata has already been updated at this point. Dbg.Assert( instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataParentSessionId), "Job Metadata for instance incomplete."); if (instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataParentSessionId)) { instance.PSWorkflowContext.JobMetadata[Constants.JobMetadataParentSessionId] = parentJob.Id; } reconstructedParentJobs.Add(parentInstanceId, parentJob); } // update job metadata with new session Id--needed for filtering. 
Dbg.Assert(instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataSessionId), "Job Metadata for instance incomplete."); Dbg.Assert(instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataPid), "Job Metadata for instance incomplete."); if (instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataSessionId)) { instance.PSWorkflowContext.JobMetadata[Constants.JobMetadataSessionId] = job.Id; } if (instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataPid)) { instance.PSWorkflowContext.JobMetadata[Constants.JobMetadataPid] = Process.GetCurrentProcess().Id; } job.StartParameters = new List <CommandParameterCollection>(); CommandParameterCollection commandParameterCollection = new CommandParameterCollection(); AddStartParametersFromCollection(instance.PSWorkflowContext.WorkflowParameters, commandParameterCollection); AddStartParametersFromCollection(instance.PSWorkflowContext.PSWorkflowCommonParameters, commandParameterCollection); bool takesPSPrivateMetadata; if (instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.WorkflowTakesPrivateMetadata)) { takesPSPrivateMetadata = (bool)instance.PSWorkflowContext.JobMetadata[Constants.WorkflowTakesPrivateMetadata]; } else { DynamicActivity da = instance.PSWorkflowDefinition != null ? instance.PSWorkflowDefinition.Workflow as DynamicActivity : null; takesPSPrivateMetadata = da != null && da.Properties.Contains(Constants.PrivateMetadata); } // If there is Private Metadata and it is not included in the "Input" collection, add it now. if (instance.PSWorkflowContext.PrivateMetadata != null && instance.PSWorkflowContext.PrivateMetadata.Count > 0 && !takesPSPrivateMetadata) { Hashtable privateMetadata = new Hashtable(); foreach (var pair in instance.PSWorkflowContext.PrivateMetadata) { privateMetadata.Add(pair.Key, pair.Value); } commandParameterCollection.Add(new CommandParameter(Constants.PrivateMetadata, privateMetadata)); } job.StartParameters.Add(commandParameterCollection); if (returnParents) { ((ContainerParentJob)reconstructedParentJobs[parentInstanceId]).AddChildJob(job); } else { jobs.Add(job); } if (!wfjob.WorkflowInstanceLoaded) { // RestoreFromWorkflowInstance sets the job state. Because we've used AddChildJob, the parent's state will be // updated automatically. wfjob.RestoreFromWorkflowInstance(instance); } } if (returnParents) { jobs.AddRange(reconstructedParentJobs.Values); } return(jobs); }
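// A stripped-down sketch (invented types, not the PowerShell source) of the find-or-create grouping
// pattern used in CreateJobsFromWorkflows above: each child record carries its parent's GUID, the
// parent is created the first time that GUID is seen, and children are attached as they stream in.
using System;
using System.Collections.Generic;

public sealed class ChildRecord { public Guid ParentId; public string Name; }

public sealed class ParentRecord
{
    public Guid Id;
    public List<ChildRecord> Children = new List<ChildRecord>();
}

public static class JobGroupingSketch
{
    public static ICollection<ParentRecord> Group(IEnumerable<ChildRecord> children)
    {
        var parents = new Dictionary<Guid, ParentRecord>();
        foreach (var child in children)
        {
            // Create the parent on first sight so its identity exists before any child is attached,
            // mirroring how the adapter builds ContainerParentJob ahead of AddChildJob.
            ParentRecord parent;
            if (!parents.TryGetValue(child.ParentId, out parent))
            {
                parent = new ParentRecord { Id = child.ParentId };
                parents.Add(child.ParentId, parent);
            }
            parent.Children.Add(child);
        }
        return parents.Values;
    }
}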
public static void Cond(bool condition, string msg) { SDebug.Assert(condition, msg); }
protected override object PrepareCellForEdit(FrameworkElement editingElement, RoutedEventArgs editingEventArgs) { DiagnosticsDebug.Assert(false, "Unexpected call to DataGridFillerColumn.PrepareCellForEdit."); return(null); }
public static void Index(int i, int max, bool allowNegative = false) { string expected = allowNegative ? $"less than {max}" : $"within 0 to {max - 1}"; SDebug.Assert((i > -1 || allowNegative) && i < max, $"invalid index: expected {expected}, got {i}"); }
public static void Sign(long l) { SDebug.Assert(l >= 0, $"expected a non-negative integer, got {l}"); }
public void PlayReversed() { Debug.Assert(Camera.main != null, "Camera.main != null"); AudioSource.PlayClipAtPoint(clips[(int)Clips.Reversed], Camera.main.transform.position); }
public void PlayNumberLines(int n) { n = n - 1; Debug.Assert(Camera.main != null, "Camera.main != null"); AudioSource.PlayClipAtPoint(clips[(int)Clips.OneLine + n], Camera.main.transform.position); }
public void PlayHorizontalMove(Tetrimo t) { Debug.Assert(Camera.main != null, "Camera.main != null"); AudioSource.PlayClipAtPoint(clips[(int)Clips.Move], Camera.main.transform.position); }
public void PlayStopped(Tetrimo t) { Debug.Assert(Camera.main != null, "Camera.main != null"); AudioSource.PlayClipAtPoint(clips[(int)Clips.Stopped], Camera.main.transform.position); }
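// Context that the Play* helpers above appear to assume (inferred, not taken from the source):
// an AudioClip array whose order matches a Clips enum, so clips[(int)Clips.X] selects a clip.
// The enum members and their order here are guesses made only so the sketch is self-contained.
using UnityEngine;

public class SoundBoardSketch : MonoBehaviour
{
    public enum Clips { Move, Stopped, Reversed, OneLine, TwoLines, ThreeLines, FourLines }

    [SerializeField] AudioClip[] clips; // must be populated in the same order as the Clips enum

    public void PlayMove()
    {
        // Same pattern as PlayHorizontalMove above: play the clip at the camera position so the
        // sound is always audible regardless of where the piece is.
        Debug.Assert(Camera.main != null, "Camera.main != null");
        AudioSource.PlayClipAtPoint(clips[(int)Clips.Move], Camera.main.transform.position);
    }
}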
/// <summary> /// Measures the children of a <see cref="DataGridColumnHeadersPresenter"/> to /// prepare for arranging them during the <see cref="M:System.Windows.FrameworkElement.ArrangeOverride(System.Windows.Size)"/> pass. /// </summary> /// <param name="availableSize"> /// The available size that this element can give to child elements. Indicates an upper limit that child elements should not exceed. /// </param> /// <returns> /// The size that the <see cref="DataGridColumnHeadersPresenter"/> determines it needs during layout, based on its calculations of child object allocated sizes. /// </returns> protected override Size MeasureOverride(Size availableSize) { if (this.OwningGrid == null) { return(base.MeasureOverride(availableSize)); } if (!this.OwningGrid.AreColumnHeadersVisible) { return(new Size(0.0, 0.0)); } double height = this.OwningGrid.ColumnHeaderHeight; bool autoSizeHeight; if (double.IsNaN(height)) { // No explicit height values were set so we can autosize height = 0; autoSizeHeight = true; } else { autoSizeHeight = false; } double totalDisplayWidth = 0; this.OwningGrid.ColumnsInternal.EnsureVisibleEdgedColumnsWidth(); DataGridColumn lastVisibleColumn = this.OwningGrid.ColumnsInternal.LastVisibleColumn; foreach (DataGridColumn column in this.OwningGrid.ColumnsInternal.GetVisibleColumns()) { // Measure each column header bool autoGrowWidth = column.Width.IsAuto || column.Width.IsSizeToHeader; DataGridColumnHeader columnHeader = column.HeaderCell; if (column != lastVisibleColumn) { columnHeader.UpdateSeparatorVisibility(lastVisibleColumn); } // If we're not using star sizing or the current column can't be resized, // then just set the display width according to the column's desired width if (!this.OwningGrid.UsesStarSizing || (!column.ActualCanUserResize && !column.Width.IsStar)) { // In the edge-case where we're given infinite width and we have star columns, the // star columns grow to their predefined limit of 10,000 (or their MaxWidth) double newDisplayWidth = column.Width.IsStar ? Math.Min(column.ActualMaxWidth, DataGrid.DATAGRID_maximumStarColumnWidth) : Math.Max(column.ActualMinWidth, Math.Min(column.ActualMaxWidth, column.Width.DesiredValue)); column.SetWidthDisplayValue(newDisplayWidth); } // If we're auto-growing the column based on the header content, we want to measure it at its maximum value if (autoGrowWidth) { columnHeader.Measure(new Size(column.ActualMaxWidth, double.PositiveInfinity)); this.OwningGrid.AutoSizeColumn(column, columnHeader.DesiredSize.Width); column.ComputeLayoutRoundedWidth(totalDisplayWidth); } else if (!this.OwningGrid.UsesStarSizing) { column.ComputeLayoutRoundedWidth(totalDisplayWidth); columnHeader.Measure(new Size(column.LayoutRoundedWidth, double.PositiveInfinity)); } // We need to track the largest height in order to auto-size if (autoSizeHeight) { height = Math.Max(height, columnHeader.DesiredSize.Height); } totalDisplayWidth += column.ActualWidth; } // If we're using star sizing (and we're not waiting for an auto-column to finish growing) // then we will resize all the columns to fit the available space. if (this.OwningGrid.UsesStarSizing && !this.OwningGrid.AutoSizingColumns) { double adjustment = double.IsPositiveInfinity(availableSize.Width) ? 
this.OwningGrid.CellsWidth : availableSize.Width - totalDisplayWidth; this.OwningGrid.AdjustColumnWidths(0, adjustment, false); // Since we didn't know the final widths of the columns until we resized, // we waited until now to measure each header double leftEdge = 0; foreach (var column in this.OwningGrid.ColumnsInternal.GetVisibleColumns()) { column.ComputeLayoutRoundedWidth(leftEdge); column.HeaderCell.Measure(new Size(column.LayoutRoundedWidth, double.PositiveInfinity)); if (autoSizeHeight) { height = Math.Max(height, column.HeaderCell.DesiredSize.Height); } leftEdge += column.ActualWidth; } } // Add the filler column if it's not represented. We won't know whether we need it or not until Arrange DataGridFillerColumn fillerColumn = this.OwningGrid.ColumnsInternal.FillerColumn; if (!fillerColumn.IsRepresented) { DiagnosticsDebug.Assert(!this.Children.Contains(fillerColumn.HeaderCell), "Unexpected parent for filler column header cell."); fillerColumn.HeaderCell.SeparatorVisibility = Visibility.Collapsed; this.Children.Insert(this.OwningGrid.ColumnsInternal.Count, fillerColumn.HeaderCell); fillerColumn.IsRepresented = true; // Optimize for the case where we don't need the filler cell fillerColumn.HeaderCell.Visibility = Visibility.Collapsed; } fillerColumn.HeaderCell.Measure(new Size(double.PositiveInfinity, double.PositiveInfinity)); if (this.DragIndicator != null) { this.DragIndicator.Measure(new Size(double.PositiveInfinity, double.PositiveInfinity)); } if (this.DropLocationIndicator != null) { this.DropLocationIndicator.Measure(new Size(double.PositiveInfinity, double.PositiveInfinity)); } this.OwningGrid.ColumnsInternal.EnsureVisibleEdgedColumnsWidth(); return(new Size(this.OwningGrid.ColumnsInternal.VisibleEdgedColumnsWidth, height)); }
/// <summary> /// Arranges the content of the <see cref="DataGridColumnHeadersPresenter"/>. /// </summary> /// <returns> /// The actual size used by the <see cref="DataGridColumnHeadersPresenter"/>. /// </returns> /// <param name="finalSize"> /// The final area within the parent that this element should use to arrange itself and its children. /// </param> protected override Size ArrangeOverride(Size finalSize) { if (this.OwningGrid == null) { return(base.ArrangeOverride(finalSize)); } if (this.OwningGrid.AutoSizingColumns) { // When we initially load an auto-column, we have to wait for all the rows to be measured // before we know its final desired size. We need to trigger a new round of measures now // that the final sizes have been calculated. this.OwningGrid.AutoSizingColumns = false; return(base.ArrangeOverride(finalSize)); } double dragIndicatorLeftEdge = 0; double frozenLeftEdge = 0; double scrollingLeftEdge = -this.OwningGrid.HorizontalOffset; foreach (DataGridColumn dataGridColumn in this.OwningGrid.ColumnsInternal.GetVisibleColumns()) { DataGridColumnHeader columnHeader = dataGridColumn.HeaderCell; DiagnosticsDebug.Assert(columnHeader.OwningColumn == dataGridColumn, "Expected columnHeader owned by dataGridColumn."); if (dataGridColumn.IsFrozen) { columnHeader.Arrange(new Rect(frozenLeftEdge, 0, dataGridColumn.LayoutRoundedWidth, finalSize.Height)); columnHeader.Clip = null; // The layout system could have clipped this because it's not aware of our render transform if (this.DragColumn == dataGridColumn && this.DragIndicator != null) { dragIndicatorLeftEdge = frozenLeftEdge + this.DragIndicatorOffset; } frozenLeftEdge += dataGridColumn.ActualWidth; } else { columnHeader.Arrange(new Rect(scrollingLeftEdge, 0, dataGridColumn.LayoutRoundedWidth, finalSize.Height)); EnsureColumnHeaderClip(columnHeader, dataGridColumn.ActualWidth, finalSize.Height, frozenLeftEdge, scrollingLeftEdge); if (this.DragColumn == dataGridColumn && this.DragIndicator != null) { dragIndicatorLeftEdge = scrollingLeftEdge + this.DragIndicatorOffset; } } scrollingLeftEdge += dataGridColumn.ActualWidth; } if (this.DragColumn != null) { if (this.DragIndicator != null) { this.EnsureColumnReorderingClip(this.DragIndicator, finalSize.Height, frozenLeftEdge, dragIndicatorLeftEdge); this.DragIndicator.Arrange(new Rect(dragIndicatorLeftEdge, 0, this.DragIndicator.ActualWidth, this.DragIndicator.ActualHeight)); } if (this.DropLocationIndicator != null) { this.EnsureColumnReorderingClip(this.DropLocationIndicator, finalSize.Height, frozenLeftEdge, this.DropLocationIndicatorOffset); this.DropLocationIndicator.Arrange(new Rect(this.DropLocationIndicatorOffset, 0, this.DropLocationIndicator.ActualWidth, this.DropLocationIndicator.ActualHeight)); } } // Arrange filler this.OwningGrid.OnFillerColumnWidthNeeded(finalSize.Width); DataGridFillerColumn fillerColumn = this.OwningGrid.ColumnsInternal.FillerColumn; if (fillerColumn.FillerWidth > 0) { fillerColumn.HeaderCell.Visibility = Visibility.Visible; fillerColumn.HeaderCell.Arrange(new Rect(scrollingLeftEdge, 0, fillerColumn.FillerWidth, finalSize.Height)); } else { fillerColumn.HeaderCell.Visibility = Visibility.Collapsed; } // This needs to be updated after the filler column is configured DataGridColumn lastVisibleColumn = this.OwningGrid.ColumnsInternal.LastVisibleColumn; if (lastVisibleColumn != null) { lastVisibleColumn.HeaderCell.UpdateSeparatorVisibility(lastVisibleColumn); } return(finalSize); }
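// A minimal, self-contained sketch of the two-pass layout contract the presenter above relies on:
// MeasureOverride asks each child for its desired size, ArrangeOverride then positions the children
// left to right at their measured widths. Plain WPF namespaces are assumed here only to keep the
// example compilable; the presenter itself targets the XAML DataGrid's visual tree.
using System;
using System.Windows;
using System.Windows.Controls;

public class LeftToRightPanel : Panel
{
    protected override Size MeasureOverride(Size availableSize)
    {
        double width = 0, height = 0;
        foreach (UIElement child in InternalChildren)
        {
            // Offer unbounded width so each child reports its natural size.
            child.Measure(new Size(double.PositiveInfinity, availableSize.Height));
            width += child.DesiredSize.Width;
            height = Math.Max(height, child.DesiredSize.Height);
        }
        return new Size(width, height);
    }

    protected override Size ArrangeOverride(Size finalSize)
    {
        double leftEdge = 0;
        foreach (UIElement child in InternalChildren)
        {
            // Place each child at the running left edge, like the column headers above.
            child.Arrange(new Rect(leftEdge, 0, child.DesiredSize.Width, finalSize.Height));
            leftEdge += child.DesiredSize.Width;
        }
        return finalSize;
    }
}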
internal static void WriteLine(string message) => SystemDebug.WriteLine(message);
public static void CanCastTo <T>(object obj) { SDebug.Assert(obj is T, $"{obj.GetType().FullName} cannot be cast to {typeof(T).FullName}"); }
/// <summary> /// The clear timeout. /// </summary> private void ClearTimeout() { Debug.WriteLine("Clearing license checker timeout."); this.checker.handler.RemoveCallbacks(this.onTimeout); }
public static void CanCastTo(object obj, Type type) { SDebug.Assert(type.IsAssignableFrom(obj.GetType()), $"{obj.GetType().FullName} cannot be cast to {type.FullName}"); }
/// <summary> /// The start timeout. /// </summary> private void StartTimeout() { Debug.WriteLine("Start monitoring license checker timeout."); this.checker.handler.PostDelayed(this.onTimeout, TimeoutMs); }
public static void Count(int ct, int max) { SDebug.Assert(ct > -1 && ct <= max, $"invalid count: expected within 0 to {max}, got {ct}"); }
public override void OnInspectorGUI() { var type = serializedObject.FindProperty("MyType"); MyTarget = target as BbSpriteLayout; // Preview button EditorGUILayout.BeginHorizontal(); GUILayout.FlexibleSpace(); var content = new GUIContent( Window_AtlasPreview.IsOpened ? "Close preview" : "Open preview...", "Toggle a window that lets you view the layout of the selected BbSprite" ); if (GUILayout.Button(content)) { TogglePreview(); } EditorGUILayout.EndHorizontal(); // Type content = new GUIContent ("Type", "The layout configuration type for this sprite's sheet."); EditorGUILayout.PropertyField(type, content); //PseudoDefaultInspector.Draw(serializedObject, "_myLayoutAsCustomGrid"); //var prop_grid = serializedObject.FindProperty("_myLayoutAsCustomGrid"); //if (prop_grid.objectReferenceValue != null && !prop_grid.hasMultipleDifferentValues) //{ // _isGridEditorOpened = EditorGUILayout.Foldout(_isGridEditorOpened, "Clip properties"); // if (_isGridEditorOpened) // { // var grids = new List<LayoutData_CustomGrid>(); // foreach (var oTarget in serializedObject.targetObjects) // { // var myTarget = oTarget as BbSpriteLayout; // Debug.Assert(myTarget != null, "MyTarget != null"); // grids.Add(myTarget._myLayoutAsCustomGrid); // } // var obj_clip = new SerializedObject(grids.ToArray()); // PseudoDefaultInspector.Draw(obj_clip); // } //} // Show extra ui for certain layout types //if ( // type.enumValueIndex == (int)BbSpriteLayout.Type.CustomGrid && // !type.hasMultipleDifferentValues) //{ // DrawUi_CustomGrid(); //} // Update properties and, if they changed, call sprite's Update if (serializedObject.ApplyModifiedProperties() || Event.current.commandName == "UndoRedoPerformed") { foreach (var oTarget in serializedObject.targetObjects) { var myTarget = oTarget as BbSpriteLayout; Debug.Assert(myTarget != null, "myTarget != null"); myTarget.Update(); var sprite = myTarget.GetComponent <BbSprite>(); if (sprite != null) { sprite.Update(); } } } }
public static void Sign(double d) { SDebug.Assert(d >= 0.0, $"expected a non-negative floating-point value, got {d}"); }
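// A hedged usage sketch for the small assertion helpers in this file (Ref, Index, Count, Sign,
// Cond, CanCastTo). The static class name "Check" is an assumption; substitute whatever class
// actually declares these methods in the project.
public static class CheckUsageSketch
{
    public static double Average(int[] values, int start, int count)
    {
        Check.Ref(values);                          // fail fast on a null array
        Check.Index(start, values.Length);          // start must be a valid index into the array
        Check.Count(count, values.Length - start);  // count must fit in the remaining range

        double sum = 0;
        for (int i = 0; i < count; i++)
        {
            sum += values[start + i];
        }
        return count == 0 ? 0 : sum / count;
    }
}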
/// <summary> /// IF THESE SEMANTICS EVER CHANGE UPDATE THE LOGIC WHICH DEFINES THIS BEHAVIOR IN /// THE DYNAMIC TYPE LOADER AS WELL AS THE COMPILER. /// /// Parameter's are considered to have type layout dependent on their generic instantiation /// if the type of the parameter in its signature is a type variable, or if the type is a generic /// structure which meets 2 characteristics: /// 1. Structure size/layout is affected by the size/layout of one or more of its generic parameters /// 2. One or more of the generic parameters is a type variable, or a generic structure which also recursively /// would satisfy constraint 2. (Note, that in the recursion case, whether or not the structure is affected /// by the size/layout of its generic parameters is not investigated.) /// /// Examples parameter types, and behavior. /// /// T -> true /// List<T> -> false /// StructNotDependentOnArgsForSize<T> -> false /// GenStructDependencyOnArgsForSize<T> -> true /// StructNotDependentOnArgsForSize<GenStructDependencyOnArgsForSize<T>> -> true /// StructNotDependentOnArgsForSize<GenStructDependencyOnArgsForSize<List<T>>>> -> false /// /// Example non-parameter type behavior /// T -> true /// List<T> -> false /// StructNotDependentOnArgsForSize<T> -> *true* /// GenStructDependencyOnArgsForSize<T> -> true /// StructNotDependentOnArgsForSize<GenStructDependencyOnArgsForSize<T>> -> true /// StructNotDependentOnArgsForSize<GenStructDependencyOnArgsForSize<List<T>>>> -> false /// </summary> private bool TypeSignatureHasVarsNeedingCallingConventionConverter(ref NativeParser parser, TypeSystemContext context, HasVarsInvestigationLevel investigationLevel) { uint data; var kind = parser.GetTypeSignatureKind(out data); switch (kind) { case TypeSignatureKind.External: return(false); case TypeSignatureKind.Variable: return(true); case TypeSignatureKind.Lookback: { var lookbackParser = parser.GetLookbackParser(data); return(TypeSignatureHasVarsNeedingCallingConventionConverter(ref lookbackParser, context, investigationLevel)); } case TypeSignatureKind.Instantiation: { RuntimeTypeHandle genericTypeDef; if (!TryGetTypeFromSimpleTypeSignature(ref parser, out genericTypeDef)) { Debug.Assert(false); return(true); // Returning true will prevent further reading from the native parser } if (!RuntimeAugments.IsValueType(genericTypeDef)) { // Reference types are treated like pointers. No calling convention conversion needed. Just consume the rest of the signature. for (uint i = 0; i < data; i++) { TypeSignatureHasVarsNeedingCallingConventionConverter(ref parser, context, HasVarsInvestigationLevel.Ignore); } return(false); } else { bool result = false; for (uint i = 0; i < data; i++) { result = TypeSignatureHasVarsNeedingCallingConventionConverter(ref parser, context, HasVarsInvestigationLevel.NotParameter) || result; } if ((result == true) && (investigationLevel == HasVarsInvestigationLevel.Parameter)) { if (!TryComputeHasInstantiationDeterminedSize(genericTypeDef, context, out result)) { Environment.FailFast("Unable to setup calling convention converter correctly"); } return(result); } return(result); } } case TypeSignatureKind.Modifier: { // Arrays, pointers and byref types signatures are treated as pointers, not requiring calling convention conversion. 
// Just consume the parameter type from the stream and return false; TypeSignatureHasVarsNeedingCallingConventionConverter(ref parser, context, HasVarsInvestigationLevel.Ignore); return(false); } case TypeSignatureKind.MultiDimArray: { // No need for a calling convention converter for this case. Just consume the signature from the stream. TypeSignatureHasVarsNeedingCallingConventionConverter(ref parser, context, HasVarsInvestigationLevel.Ignore); uint boundCount = parser.GetUnsigned(); for (uint i = 0; i < boundCount; i++) { parser.GetUnsigned(); } uint lowerBoundCount = parser.GetUnsigned(); for (uint i = 0; i < lowerBoundCount; i++) { parser.GetUnsigned(); } } return(false); case TypeSignatureKind.FunctionPointer: { // No need for a calling convention converter for this case. Just consume the signature from the stream. uint argCount = parser.GetUnsigned(); for (uint i = 0; i < argCount; i++) { TypeSignatureHasVarsNeedingCallingConventionConverter(ref parser, context, HasVarsInvestigationLevel.Ignore); } } return(false); default: parser.ThrowBadImageFormatException(); return(true); } }
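// A simplified, self-contained illustration (invented node types, not the runtime's parser) of the
// recursion above: a bare type variable always depends on instantiation, an instantiated reference
// type behaves like a pointer and stops the search, and an instantiated value type keeps recursing
// into its generic arguments.
public abstract class TypeSig { }
public sealed class VariableSig : TypeSig { }
public sealed class ExternalSig : TypeSig { public bool IsValueType; }
public sealed class InstantiationSig : TypeSig { public ExternalSig GenericDefinition; public TypeSig[] Arguments; }

public static class SignatureWalkerSketch
{
    public static bool HasVarsAffectingLayout(TypeSig sig)
    {
        switch (sig)
        {
            case VariableSig _:
                return true;   // T itself: layout depends on the instantiation
            case InstantiationSig refInst when !refInst.GenericDefinition.IsValueType:
                return false;  // reference types are treated like pointers; layout is fixed
            case InstantiationSig valueInst:
                bool result = false;
                foreach (var arg in valueInst.Arguments)
                {
                    result |= HasVarsAffectingLayout(arg); // value type: any var in the args matters
                }
                return result;
            default:
                return false;  // plain external type: size/layout is already known
        }
    }
}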
bool IExportControl.Run() { var filePath = txtTargetPath.Text; if (string.IsNullOrEmpty(filePath)) { ShowMessageBox(Strings.MessageSelectOutputPathFirst); return(false); } if (File.Exists(filePath) && ShowConfirmBox(Strings.OutputFileExistedWarning) == false) { return(false); } var homePath = InnerApp.GetHomePath(); if (InnerApp.CheckHomeFolder(homePath) == false && ShowConfirmBox(Strings.HomeFolderIsInvalid) == false) { return(false); } var visualStyle = cbVisualStyle.SelectedItem as VisualStyleInfo; if (visualStyle != null) { foreach (var p in visualStyle.Features) { _Features.FirstOrDefault(x => x.Type == p.Key)?.ChangeSelected(_Features, p.Value); } } var levelOfDetail = (cbLevelOfDetail.SelectedItem as ComboItemInfo) ?? _LevelOfDetailDefault; #region Apply the UI options to _Features void SetFeature(FeatureType featureType, bool selected) { _Features.FirstOrDefault(x => x.Type == featureType)?.ChangeSelected(_Features, selected); } //SetFeature(FeatureType.ExportGrids, cbIncludeGrids.Checked); SetFeature(FeatureType.ExcludeLines, cbExcludeLines.Checked); SetFeature(FeatureType.ExcludePoints, cbExcludeModelPoints.Checked); SetFeature(FeatureType.OnlySelected, cbExcludeUnselectedElements.Checked && _HasSelectElements); SetFeature(FeatureType.UseGoogleDraco, cbUseDraco.Checked); SetFeature(FeatureType.ExtractShell, cbUseExtractShell.Checked); SetFeature(FeatureType.GenerateModelsDb, cbGeneratePropDbSqlite.Checked); SetFeature(FeatureType.ExportSvfzip, cbExportSvfzip.Checked); SetFeature(FeatureType.GenerateThumbnail, cbGenerateThumbnail.Checked); SetFeature(FeatureType.EnableAutomaticSplit, cbEnableAutomaticSplit.Checked); #endregion var isCancelled = false; using (var session = LicenseConfig.Create()) { if (session.IsValid == false) { LicenseConfig.ShowDialog(session, ParentForm); return(false); } #region Save the settings var config = _LocalConfig; config.Features = _Features.Where(x => x.Selected).Select(x => x.Type).ToList(); config.LastTargetPath = txtTargetPath.Text; config.AutoOpenAppName = string.Empty; config.VisualStyle = visualStyle?.Key; config.LevelOfDetail = levelOfDetail?.Value ?? -1; _Config.Save(); #endregion var sw = Stopwatch.StartNew(); try { var setting = new ExportSetting(); setting.LevelOfDetail = config.LevelOfDetail; setting.OutputPath = config.LastTargetPath; setting.Features = _Features.Where(x => x.Selected && x.Enabled).Select(x => x.Type).ToList(); setting.Site = ExporterHelper.GetSiteInfo(_View.GetRootModel()) ?? 
SiteInfo.CreateDefault(); setting.Oem = LicenseConfig.GetOemInfo(InnerApp.GetHomePath()); setting.PreExportSeedFeatures = InnerApp.GetPreExportSeedFeatures(@"glTF"); using (var progress = new ProgressExHelper(this.ParentForm, Strings.MessageExporting)) { var cancellationToken = progress.GetCancellationToken(); try { StartExport(_View, setting, progress.GetProgressCallback(), cancellationToken); } catch (IOException ex) { ShowMessageBox(string.Format(Strings.MessageFileSaveFailure, ex.ToString())); } isCancelled = cancellationToken.IsCancellationRequested; } sw.Stop(); var ts = sw.Elapsed; ExportDuration = new TimeSpan(ts.Days, ts.Hours, ts.Minutes, ts.Seconds); // drop the milliseconds Debug.WriteLine(Strings.MessageOperationSuccessAndElapsedTime, ExportDuration); if (isCancelled == false) { ShowMessageBox(string.Format(Strings.MessageExportSuccess, ExportDuration)); } } catch (IOException ex) { sw.Stop(); Debug.WriteLine(Strings.MessageOperationFailureAndElapsedTime, sw.Elapsed); ShowMessageBox(string.Format(Strings.MessageFileSaveFailure, ex.Message)); } //catch (Autodesk.Dgn.Exceptions.ExternalApplicationException) //{ // sw.Stop(); // Debug.WriteLine(Strings.MessageOperationFailureAndElapsedTime, sw.Elapsed); // ShowMessageBox(Strings.MessageOperationFailureAndTryLater); //} catch (Exception ex) { sw.Stop(); Debug.WriteLine(Strings.MessageOperationFailureAndElapsedTime, sw.Elapsed); ShowMessageBox(ex.ToString()); } } return(isCancelled == false); }
private bool CompareTypeSigs(ref NativeParser parser1, ref NativeParser parser2) { // startOffset lets us backtrack to the TypeSignatureKind for external types since the TypeLoader // expects to read it in. uint data1; uint startOffset1 = parser1.Offset; var typeSignatureKind1 = parser1.GetTypeSignatureKind(out data1); // If the parser is at a lookback type, get a new parser for it and recurse. // Since we haven't read the element type of parser2 yet, we just pass it in unchanged if (typeSignatureKind1 == TypeSignatureKind.Lookback) { NativeParser lookbackParser1 = parser1.GetLookbackParser(data1); return(CompareTypeSigs(ref lookbackParser1, ref parser2)); } uint data2; uint startOffset2 = parser2.Offset; var typeSignatureKind2 = parser2.GetTypeSignatureKind(out data2); // If parser2 is a lookback type, we need to rewind parser1 to its startOffset1 // before recursing. if (typeSignatureKind2 == TypeSignatureKind.Lookback) { NativeParser lookbackParser2 = parser2.GetLookbackParser(data2); parser1 = new NativeParser(parser1.Reader, startOffset1); return(CompareTypeSigs(ref parser1, ref lookbackParser2)); } if (typeSignatureKind1 != typeSignatureKind2) { return(false); } switch (typeSignatureKind1) { case TypeSignatureKind.Lookback: { // Recursion above better have removed all lookbacks Debug.Assert(false, "Unexpected lookback type"); return(false); } case TypeSignatureKind.Modifier: { // Ensure the modifier kind (vector, pointer, byref) is the same if (data1 != data2) { return(false); } return(CompareTypeSigs(ref parser1, ref parser2)); } case TypeSignatureKind.Variable: { // variable index is in data if (data1 != data2) { return(false); } break; } case TypeSignatureKind.MultiDimArray: { // rank is in data if (data1 != data2) { return(false); } if (!CompareTypeSigs(ref parser1, ref parser2)) { return(false); } uint boundCount1 = parser1.GetUnsigned(); uint boundCount2 = parser2.GetUnsigned(); if (boundCount1 != boundCount2) { return(false); } for (uint i = 0; i < boundCount1; i++) { if (parser1.GetUnsigned() != parser2.GetUnsigned()) { return(false); } } uint lowerBoundCount1 = parser1.GetUnsigned(); uint lowerBoundCount2 = parser2.GetUnsigned(); if (lowerBoundCount1 != lowerBoundCount2) { return(false); } for (uint i = 0; i < lowerBoundCount1; i++) { if (parser1.GetUnsigned() != parser2.GetUnsigned()) { return(false); } } break; } case TypeSignatureKind.FunctionPointer: { // callingConvention is in data if (data1 != data2) { return(false); } uint argCount1 = parser1.GetUnsigned(); uint argCount2 = parser2.GetUnsigned(); if (argCount1 != argCount2) { return(false); } for (uint i = 0; i < argCount1; i++) { if (!CompareTypeSigs(ref parser1, ref parser2)) { return(false); } } break; } case TypeSignatureKind.Instantiation: { // Type parameter count is in data if (data1 != data2) { return(false); } if (!CompareTypeSigs(ref parser1, ref parser2)) { return(false); } for (uint i = 0; i < data1; i++) { if (!CompareTypeSigs(ref parser1, ref parser2)) { return(false); } } break; } case TypeSignatureKind.External: { RuntimeTypeHandle typeHandle1 = GetExternalTypeHandle(ref parser1, data1); RuntimeTypeHandle typeHandle2 = GetExternalTypeHandle(ref parser2, data2); if (!typeHandle1.Equals(typeHandle2)) { return(false); } break; } default: return(false); } return(true); }
protected void Debug(IList <T> collection) { Diagnostics.WriteLine($"Result: {GetString(collection)}"); }
/// <summary> /// Run in background as a daemon. Receive application PIDs to monitor, "stop" to exit. /// </summary> private void DaemonThread() { try { using (var server = new NamedPipeServerStream("UninstallAutomatizerDaemon", PipeDirection.In)) using (var reader = new StreamReader(server)) { while (true) { server.WaitForConnection(); Debug.WriteLine("Client connected through pipe"); while (true) { var line = reader.ReadLine()?.ToLowerInvariant(); Debug.WriteLine("Received through pipe: " + (line ?? "NULL")); if (line == null) { Thread.Sleep(500); continue; } if (line == "stop") { return; } int pid; if (!int.TryParse(line, out pid)) { OnStatusUpdate(new UninstallHandlerUpdateArgs(UninstallHandlerUpdateKind.Normal, string.Format(Localization.UninstallHandler_InvalidProcessNumber, line))); continue; } try { if (_runningHooks.TryGetValue(pid, out var ttt) && !ttt.IsCompleted) { continue; } var target = Process.GetProcessById(pid); if (!ProcessCanBeAutomatized(target)) { Debug.WriteLine("Tried to automate a process that is not allowed: " + target.ProcessName); continue; } var app = Application.Attach(target); var t = new Task(() => { try { Debug.WriteLine("Running automatizer on thread pool"); AutomatedUninstallManager.AutomatizeApplication(app, AutomatizeStatusCallback); } catch (Exception ex) { OnStatusUpdate(new UninstallHandlerUpdateArgs(UninstallHandlerUpdateKind.Normal, string.Format(Localization.Message_UninstallFailed, ex.InnerException?.Message ?? ex.Message))); } finally { Task tt; _runningHooks.TryRemove(pid, out tt); } }); _runningHooks.AddOrUpdate(pid, t, (i, task) => t); Debug.WriteLine("Created automatizer thread"); t.Start(); } catch (SystemException ex) { Console.WriteLine(ex); } } } } } catch (Exception ex) { OnStatusUpdate(new UninstallHandlerUpdateArgs(UninstallHandlerUpdateKind.Normal, Localization.UninstallHandler_DaemonStoppedReason + (ex.InnerException?.Message ?? ex.Message))); } finally { OnStatusUpdate(new UninstallHandlerUpdateArgs(UninstallHandlerUpdateKind.Succeeded, Localization.Message_Success)); } }
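// Illustrative client for the daemon above: it connects to the "UninstallAutomatizerDaemon" pipe
// and writes one line per command, either a PID to monitor or "stop" to shut the daemon down. Only
// the pipe name and the line protocol come from DaemonThread itself; the class and method names
// here are invented for the sketch.
using System.IO;
using System.IO.Pipes;

public static class DaemonClientSketch
{
    public static void Send(string line)
    {
        using (var client = new NamedPipeClientStream(".", "UninstallAutomatizerDaemon", PipeDirection.Out))
        using (var writer = new StreamWriter(client))
        {
            client.Connect(5000);       // wait up to 5 seconds for the daemon to accept
            writer.WriteLine(line);     // e.g. "1234" to monitor PID 1234, or "stop"
            writer.Flush();
        }
    }
}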
/// <summary> /// Get list of jobs based on the adapter specific /// filter parameters /// </summary> /// <param name="filter">dictionary containing name value /// pairs for adapter specific filters</param> /// <param name="recurse"></param> /// <returns>collection of jobs that match the /// specified criteria</returns> public override IList <Job2> GetJobsByFilter(Dictionary <string, object> filter, bool recurse) { if (filter == null) { throw new ArgumentNullException("filter"); } _tracer.WriteMessage(String.Format(CultureInfo.InvariantCulture, "WorkflowJobSourceAdapter: Getting Workflow jobs by filter: {0}", filter)); PopulateJobRepositoryIfRequired(); // Do not modify the user's collection. Dictionary <string, object> filter2 = new Dictionary <string, object>(filter, StringComparer.CurrentCultureIgnoreCase); bool addPid = false; bool searchParentJobs = true; if (filter2.Keys.Count == 0) { searchParentJobs = false; } else { if (filter2.Keys.Any(key => (((!key.Equals(Constants.JobMetadataSessionId, StringComparison.OrdinalIgnoreCase) && !key.Equals(Constants.JobMetadataInstanceId, StringComparison.OrdinalIgnoreCase)) && !key.Equals(Constants.JobMetadataName, StringComparison.OrdinalIgnoreCase)) && !key.Equals(Constants.JobMetadataCommand, StringComparison.OrdinalIgnoreCase)) && !key.Equals(Constants.JobMetadataFilterState, StringComparison.OrdinalIgnoreCase))) { searchParentJobs = false; } } List <Job2> jobs = new List <Job2>(); // search container parent jobs first if (searchParentJobs) { List <ContainerParentJob> repositoryJobs = _jobRepository.GetItems(); List <Job2> searchList = SearchJobsOnV2Parameters(repositoryJobs, filter2); repositoryJobs.Clear(); if (searchList.Count > 0) { jobs.AddRange(searchList); } } if (recurse) { // If the session Id parameter is present, make sure that the Id match is valid by adding the process Id to the filter. if (filter2.ContainsKey(Constants.JobMetadataSessionId)) { addPid = true; } if (addPid) { filter2.Add(Constants.JobMetadataPid, Process.GetCurrentProcess().Id); } if (filter2.ContainsKey(Constants.JobMetadataFilterState)) { filter2.Remove(Constants.JobMetadataFilterState); } LoadWorkflowInstancesFromStore(); // remove state from filter here and do it separately IEnumerable <Job2> workflowInstances = GetJobManager().GetJobs(WorkflowFilterTypes.All, filter2); if (filter.ContainsKey(Constants.JobMetadataFilterState)) { JobState searchState = (JobState) LanguagePrimitives.ConvertTo(filter[Constants.JobMetadataFilterState], typeof(JobState), CultureInfo.InvariantCulture); var list = workflowInstances.Where(job => job.JobStateInfo.State == searchState).ToList(); jobs.AddRange(list); } else { jobs.AddRange(workflowInstances); } } List <Job2> cpjs = new List <Job2>(); foreach (var job in jobs) { if (job is ContainerParentJob && !cpjs.Contains(job)) { cpjs.Add(job); continue; } PSWorkflowJob wfj = job as PSWorkflowJob; Dbg.Assert(wfj != null, "if it's not a containerparentjob, it had better be a workflowjob"); ContainerParentJob cpj = _jobRepository.GetItem((Guid)wfj.JobMetadata[Constants.JobMetadataParentInstanceId]); if (!cpjs.Contains(cpj)) { cpjs.Add(cpj); } } return(cpjs); }
public static void Ref(object reference) { SDebug.Assert(reference != null, "reference is null"); }
public static void Assert([AssertionCondition(AssertionConditionType.IS_TRUE)] bool condition) { SysDebug.Assert(condition); }
public static void Write(object value) { DBG.Write(value); }