/// <summary>
/// Verifies that a Reducer constructed for a key loads every stored file
/// associated with that key (and only those).
/// </summary>
public void ReducerLoadsFilesAssociatedWithItsKey()
{
    // Arrange: populate storage with files belonging to two distinct keys.
    var keySet = CreateTwoKeyFileSet(this.storage);

    // Act: build a reducer for the first key only; no reduce delegate is
    // needed to exercise file loading.
    var keyReducer = new Reducer(keySet[0], null, this.storage);

    // Assert: the fixture associates exactly three files with that key.
    keyReducer.LoadedFileCount.ShouldBe(3);
}
/// <summary>
/// Verifies that PerformReduce can run reduce logic supplied by externally
/// loaded code: a sample reducer source file is placed into storage, loaded
/// as an IReduceProvider, and its Reduce delegate is handed to the Reducer.
/// </summary>
public void ReducerPerformsReduceOnLoadedFilesUsingExternalCode()
{
    // Arrange: two-key data set plus the external reducer source in storage.
    var keySet = CreateTwoKeyFileSet(this.storage);
    TestHelpers.LoadToStorage(@"..\..\SampleReducer.cs", new FileUri("file:///SampleReducer.cs"), this.storage);
    var reduceProvider = Loader.Load<IReduceProvider>("SampleReducer.cs", this.storage);

    // Act: reduce the first key's files with the externally loaded delegate.
    var keyReducer = new Reducer(keySet[0], reduceProvider.Reduce, this.storage);
    var reduceResult = keyReducer.PerformReduce();

    // Assert: the sample reducer aggregates the values to "3".
    reduceResult.Value.ShouldBe("3");
}
/// <summary>
/// Verifies that PerformReduce applies an inline reduce delegate to every
/// loaded value and yields the aggregated result.
/// </summary>
public void ReducerPerformsReduceOnLoadedFiles()
{
    // Arrange: populate storage with files belonging to two distinct keys.
    var keySet = CreateTwoKeyFileSet(this.storage);

    // Inline reducer: sum the integer-encoded values for the key.
    var keyReducer = new Reducer(
        keySet[0],
        (key, values) =>
        {
            int sum = 0;
            foreach (var item in values)
            {
                sum += int.Parse(item);
            }
            return new KeyValuePair<string, string>(key, sum.ToString());
        },
        this.storage);

    // Act.
    var reduceResult = keyReducer.PerformReduce();

    // Assert: the fixture's values for the first key sum to 3.
    reduceResult.Value.ShouldBe("3");
}
/// <summary>
/// Gets the multiplicative factor that converts a quantity expressed in this
/// unit of measure into the target unit of measure. Results are memoized in
/// ConversionRegistry.
/// </summary>
/// <param name="targetUOM">Target unit of measure (must not be null).</param>
/// <returns>Conversion factor.</returns>
public double GetConversionFactor(UnitOfMeasure targetUOM)
{
    if (targetUOM == null) {
        throw new Exception(MeasurementSystem.GetMessage("unit.cannot.be.null"));
    }

    // first check the cache
    if (ConversionRegistry.TryGetValue(targetUOM, out double cachedFactor)) {
        return (cachedFactor);
    }

    // verify the two units measure compatible quantities
    CheckTypes(this, targetUOM);

    // decompose both units into maps of base-unit -> exponent
    Reducer fromPowerMap = GetReducer();
    Reducer toPowerMap = targetUOM.GetReducer();

    Dictionary<UnitOfMeasure, int> fromMap = fromPowerMap.Terms;
    Dictionary<UnitOfMeasure, int> toMap = toPowerMap.Terms;

    // decompositions must have the same number of terms to be convertible
    if (fromMap.Count != toMap.Count) {
        string msg = String.Format(MeasurementSystem.GetMessage("incompatible.units"), this, targetUOM);
        throw new Exception(msg);
    }

    double fromFactor = fromPowerMap.MapScalingFactor;
    double toFactor = toPowerMap.MapScalingFactor;

    double factor = 1;

    // compute map factor: pair each 'from' term with the 'to' term of the
    // same unit type, accumulating the scalar ratio raised to the term power
    int matchCount = 0;

    foreach (KeyValuePair<UnitOfMeasure, int> fromEntry in fromMap) {
        UnitType fromType = fromEntry.Key.UOMType;
        UnitOfMeasure fromUOM = fromEntry.Key;
        int fromPower = fromEntry.Value;

        foreach (KeyValuePair<UnitOfMeasure, int> toEntry in toMap) {
            UnitType toType = toEntry.Key.UOMType;

            if (fromType.Equals(toType)) {
                matchCount++;
                UnitOfMeasure toUOM = toEntry.Key;
                double bd = fromUOM.ConvertScalarToScalar(toUOM);
                bd = Math.Pow(bd, fromPower);
                factor = factor * bd;
                break;
            }
        } // to map
    } // from map

    // every 'from' term must have found a matching 'to' term
    if (matchCount != fromMap.Count) {
        string msg = String.Format(MeasurementSystem.GetMessage("incompatible.units"), this, targetUOM);
        throw new Exception(msg);
    }

    cachedFactor = factor * (fromFactor / toFactor);

    // cache it
    ConversionRegistry[targetUOM] = cachedFactor;

    return (cachedFactor);
}
//TODO: Implement IBuilder interface and pass in Builder to decouple.
/// <summary>
/// Creates a store seeded with an initial state and a root reducer.
/// </summary>
public Store(TState initialState, Reducer<TState> rootReducer)
{
    State = initialState;
    _rootReducer = rootReducer;
    // First dispatch is routed through InitialDispatch; presumably it
    // finishes store initialization before normal dispatching -- TODO confirm.
    _dispatch = InitialDispatch;
}
/// <summary>
/// Creates a store that wraps a BasicStore built from the given root
/// reducer, then installs the middleware pipeline.
/// </summary>
public Store(Reducer<State> rootReducer)
{
    store = new BasicStore(rootReducer);
    Middleware();
}
/// <summary>
/// Splitter: registers a child reducer for a single field of the state,
/// selected by a field-access expression.
/// </summary>
/// <typeparam name="T">Type of the state field handled by the child reducer.</typeparam>
/// <param name="composer">Expression selecting the state field, e.g. s => s.SomeField.</param>
/// <param name="reducer">Reducer to run against that field.</param>
/// <returns>This instance, for fluent chaining.</returns>
public ComposableReducer<State> Diverter<T>(Expression<Func<State, T>> composer, Reducer<T> reducer)
{
    var memberExpr = composer.Body as MemberExpression;

    if (memberExpr == null)
    {
        throw new ArgumentException(string.Format(
            "Expression '{0}' should be a field.",
            composer.ToString()));
    }

    // Soft cast: a property (or other non-field member) access must fall
    // through to the ArgumentException below. The previous hard cast
    // ((FieldInfo)memberExpr.Member) threw InvalidCastException first,
    // making the null check unreachable.
    var member = memberExpr.Member as FieldInfo;

    if (member == null)
    {
        throw new ArgumentException(string.Format(
            "Expression '{0}' should be a constant expression",
            composer.ToString()));
    }

    fieldReducers.Add(new Tuple<FieldInfo, Delegate>(member, reducer));

    return (this);
}
/// <summary>
/// compute SDF for the scan object, and then compute offset iso-contours
/// at the inner offset and at inner offset + thickness. The SDF and both
/// contour meshes are cached and only recomputed when their inputs change.
/// </summary>
void compute_offset_meshes()
{
    int sdf_cells = 128;
    int mesh_cells = 128;

    double max_offset = inner_offset + thickness;
    // rebuild the SDF only if the requested offset exceeds what the cached
    // grid was built for
    if (max_offset > cached_sdf_max_offset) {
        DMesh3 meshIn = new DMesh3(MeshSource.GetIMesh(), MeshHints.IsCompact, MeshComponents.None);
        MeshTransforms.FromFrame(meshIn, cachedInputsTransform);

        // [RMS] reduce this mesh? speeds up SDF quite a bit...
        Reducer r = new Reducer(meshIn);
        r.ReduceToTriangleCount(2500);

        double cell_size = meshIn.CachedBounds.MaxDim / sdf_cells;
        int exact_cells = (int)((max_offset) / cell_size) + 1;
        MeshSignedDistanceGrid sdf = new MeshSignedDistanceGrid(meshIn, cell_size) {
            ExactBandWidth = exact_cells
        };
        sdf.Compute();
        cached_sdf = sdf;
        cached_sdf_max_offset = max_offset;
        cached_sdf_bounds = meshIn.CachedBounds;
        // invalidate the cached iso-contour meshes since the SDF changed
        cached_inner_sdf_offset = 0;
        cached_outer_sdf_offset = 0;
    }

    if (cached_inner_sdf_offset != inner_offset || cached_outer_sdf_offset != max_offset) {
        var iso = new DenseGridTrilinearImplicit(cached_sdf.Grid, cached_sdf.GridOrigin, cached_sdf.CellSize);

        MarchingCubes c = new MarchingCubes() { Implicit = iso };
        c.Bounds = cached_sdf_bounds;
        c.CubeSize = c.Bounds.MaxDim / mesh_cells;
        c.Bounds.Expand(max_offset + 3 * c.CubeSize);

        // inner iso-contour at inner_offset
        if (cached_inner_sdf_offset != inner_offset) {
            c.IsoValue = inner_offset;
            c.Generate();
            InnerOffsetMesh = c.Mesh;
            // collapse short edges to clean up marching-cubes output
            Reducer reducer = new Reducer(InnerOffsetMesh);
            reducer.ReduceToEdgeLength(c.CubeSize / 2);
            InnerOffsetMeshSpatial = new DMeshAABBTree3(InnerOffsetMesh, true);
            cached_inner_sdf_offset = inner_offset;
        }

        // outer iso-contour at inner_offset + thickness
        if (cached_outer_sdf_offset != max_offset) {
            c.IsoValue = inner_offset + thickness;
            c.Generate();
            OuterOffsetMesh = c.Mesh;
            Reducer reducer = new Reducer(OuterOffsetMesh);
            reducer.ReduceToEdgeLength(c.CubeSize / 2);
            OuterOffsetMeshSpatial = new DMeshAABBTree3(OuterOffsetMesh, true);
            cached_outer_sdf_offset = max_offset;
        }
    }

    //Util.WriteDebugMesh(MeshSource.GetIMesh(), "c:\\scratch\\__OFFESTS_orig.obj");
    //Util.WriteDebugMesh(InnerOffsetMesh, "c:\\scratch\\__OFFESTS_inner.obj");
    //Util.WriteDebugMesh(OuterOffsetMesh, "c:\\scratch\\__OFFESTS_outer.obj");
}
/// <summary>
/// Prepares this task context for the reduce phase: creates the reducer and
/// its record writer from the factory and marks a task as assigned.
/// </summary>
/// <param name="reduce">Reduce task number (unused here; presumably consumed elsewhere -- TODO confirm).</param>
/// <param name="pipedOutput">Whether output is piped (unused here -- TODO confirm downstream use).</param>
public void RunReduce(int reduce, bool pipedOutput)
{
    reducer = factory.CreateReducer(this);
    writer = factory.CreateRecordWriter(this);
    hasTask = true;
}
/// <summary>
/// Computes an unsigned distance field for the source mesh and extracts the
/// shell iso-surface at half the shell thickness via marching cubes.
/// Returns early (leaving ResultMesh untouched) whenever is_invalidated()
/// reports cancellation.
/// </summary>
protected virtual void compute_shell_distancefield_unsigned()
{
    double offset_distance = shell_thickness * 0.5;

    // (re)build the cached SDF if missing, built for a smaller offset, or
    // built at a different cell size
    if (cached_sdf == null ||
        offset_distance > cached_sdf_max_offset ||
        grid_cell_size != cached_sdf.CellSize) {
        DMesh3 meshIn = MeshSource.GetDMeshUnsafe();
        int exact_cells = (int)((offset_distance) / grid_cell_size) + 1;
        MeshSignedDistanceGrid sdf = new MeshSignedDistanceGrid(meshIn, grid_cell_size) {
            ExactBandWidth = exact_cells,
            ComputeSigns = false      // unsigned field: no inside/outside signs
        };
        sdf.CancelF = is_invalidated;
        sdf.Compute();
        if (is_invalidated()) {
            return;
        }
        cached_sdf = sdf;
        cached_sdf_max_offset = offset_distance;
        cached_sdf_bounds = meshIn.CachedBounds;
    }

    var iso = new DenseGridTrilinearImplicit(cached_sdf.Grid, cached_sdf.GridOrigin, cached_sdf.CellSize);

    MarchingCubes c = new MarchingCubes();
    c.Implicit = iso;
    c.IsoValue = offset_distance;
    c.Bounds = cached_sdf_bounds;
    c.CubeSize = mesh_cell_size;
    c.Bounds.Expand(offset_distance + 3 * c.CubeSize);
    c.RootMode = MarchingCubes.RootfindingModes.LerpSteps;
    c.RootModeSteps = 5;
    c.CancelF = is_invalidated;
    c.Generate();
    if (is_invalidated()) {
        return;
    }

    // quick cleanup pass on the marching-cubes output
    Reducer r = new Reducer(c.Mesh);
    r.FastCollapsePass(c.CubeSize * 0.5, 3, true);
    if (is_invalidated()) {
        return;
    }

    if (min_component_volume > 0) {
        MeshEditor.RemoveSmallComponents(c.Mesh, min_component_volume, min_component_volume);
    }
    if (is_invalidated()) {
        return;
    }

    ResultMesh = c.Mesh;
}
/// <summary>
/// Registers a named state slice with its initial state and reducer.
/// Throws ArgumentException if a slice with the same name was already
/// registered (Dictionary.Add semantics).
/// </summary>
public void AddStateSlice<TState>(string name, TState initialState, Reducer<TState> reducer)
{
    _stateSlicesByName.Add(name, new StateSlice<TState>(initialState, reducer));
}
/// <summary>
/// Sets the map (and optional reduce) delegate for this view and records the
/// given version string in the database, so a version change can trigger an
/// index rebuild.
/// </summary>
/// <returns>true if the view row was newly inserted or its stored version
/// differed (i.e. the index needs rebuilding); false on failure or when the
/// version is unchanged.</returns>
public bool SetMapReduce(Mapper mapBlock, Reducer reduceBlock, string version)
{
    System.Diagnostics.Debug.Assert((mapBlock != null));
    System.Diagnostics.Debug.Assert((version != null));
    this.mapBlock = mapBlock;
    this.reduceBlock = reduceBlock;
    if (!database.Open()) {
        return false;
    }
    // Update the version column in the database. This is a little weird looking
    // because we want to avoid modifying the database if the version didn't
    // change, and because the row might not exist yet.
    SQLiteStorageEngine storageEngine = this.database.GetDatabase();
    // Older Android doesnt have reliable insert or ignore, will to 2 step
    // FIXME review need for change to execSQL, manual call to changes()
    string sql = "SELECT name, version FROM views WHERE name=?";
    string[] args = new string[] { name };
    Cursor cursor = null;
    try {
        cursor = storageEngine.RawQuery(sql, args);
        if (!cursor.MoveToNext()) {
            // no such record, so insert
            ContentValues insertValues = new ContentValues();
            insertValues.Put("name", name);
            insertValues.Put("version", version);
            storageEngine.Insert("views", null, insertValues);
            return true;
        }
        ContentValues updateValues = new ContentValues();
        updateValues.Put("version", version);
        updateValues.Put("lastSequence", 0);
        string[] whereArgs = new string[] { name, version };
        // only touches the row when the stored version differs, so the
        // affected-row count doubles as a "version changed" flag
        int rowsAffected = storageEngine.Update("views", updateValues, "name=? AND version!=?", whereArgs);
        return (rowsAffected > 0);
    } catch (SQLException e) {
        Log.E(Log.TagView, "Error setting map block", e);
        return false;
    } finally {
        if (cursor != null) {
            cursor.Close();
        }
    }
}
/// <summary>
/// Recomputes graph-based supports for the source mesh: generates a support
/// graph from overhang analysis, meshes it into tubes, then reduces the
/// result mesh. On cancellation ResultMesh ends up null; on exception a
/// failure-output mesh is produced instead.
/// </summary>
public virtual void Update()
{
    base.begin_update();
    // captured but not used below -- presumably for staleness checks
    // elsewhere; TODO confirm
    int start_timestamp = this.CurrentInputTimestamp;

    if (MeshSource == null) {
        throw new Exception("GenerateGraphSupportsOp: must set valid MeshSource to compute!");
    }

    try {
        ResultMesh = null;

        DMesh3 mesh = MeshSource.GetDMeshUnsafe();

        // build the support graph from the mesh's overhang analysis
        GraphSupportGenerator supportgen = new GraphSupportGenerator(mesh, get_cached_spatial(), GridCellSize);
        supportgen.OverhangAngleDeg = this.overhang_angle;
        supportgen.ForceMinY = (float)this.min_y;
        supportgen.ProcessBottomUp = this.bottom_up;
        supportgen.OverhangAngleOptimizeDeg = this.support_min_angle;
        supportgen.OptimizationRounds = this.optimize_rounds;
        supportgen.GraphSurfaceDistanceOffset = this.post_diam / 2 + surface_offset_distance;

        supportgen.Progress = new ProgressCancel(is_invalidated);
        supportgen.Generate();
        DGraph3 graph = supportgen.Graph;
        if (is_invalidated()) {
            goto skip_to_end;
        }

        // mesh the support graph into tubes (tips / posts / ground pads)
        GraphTubeMesher mesher = new GraphTubeMesher(supportgen);
        mesher.TipRadius = this.tip_diam / 2;
        mesher.PostRadius = this.post_diam / 2;
        mesher.GroundRadius = this.base_diam / 2;
        mesher.SamplerCellSizeHint = supportgen.CellSize / 2;
        mesher.Progress = new ProgressCancel(is_invalidated);
        mesher.Generate();
        if (is_invalidated()) {
            goto skip_to_end;
        }

        ResultMesh = mesher.ResultMesh;
        // simplify the tube mesh; cancellation is checked via Progress
        Reducer reducer = new Reducer(ResultMesh);
        reducer.Progress = new ProgressCancel(is_invalidated);
        reducer.ReduceToEdgeLength(mesher.ActualCellSize / 2);

skip_to_end:
        if (is_invalidated()) {
            ResultMesh = null;
        }
        base.complete_update();
    } catch (Exception e) {
        PostOnOperatorException(e);
        ResultMesh = base.make_failure_output(MeshSource.GetDMeshUnsafe());
        base.complete_update();
    }
}
/// <summary>
/// Builds a signed distance field for the input mesh, extracts the offset
/// iso-surface via marching cubes, then runs cleanup passes. Returns null
/// whenever is_invalidated() fires mid-computation.
/// </summary>
protected virtual DMesh3 update_step_2(DMesh3 meshIn)
{
    double unsigned_offset = Math.Abs(distance);
    int exact_cells = (int)(unsigned_offset / grid_cell_size) + 1;

    // only use spatial DS if we are computing enough cells
    bool compute_spatial = GenerateClosedMeshOp.MeshSDFShouldUseSpatial(
        input_spatial, exact_cells, grid_cell_size, input_mesh_edge_stats.z) != null;
    DMeshAABBTree3 use_spatial = (compute_spatial) ? new DMeshAABBTree3(meshIn, true) : null;

    MeshSignedDistanceGrid sdf = new MeshSignedDistanceGrid(meshIn, grid_cell_size, use_spatial) {
        ExactBandWidth = exact_cells
    };
    if (use_spatial != null) {
        // narrow-band mode only makes sense with a spatial data structure
        sdf.NarrowBandMaxDistance = unsigned_offset + grid_cell_size;
        sdf.ComputeMode = MeshSignedDistanceGrid.ComputeModes.NarrowBand_SpatialFloodFill;
    }

    sdf.CancelF = is_invalidated;
    sdf.Compute();
    if (is_invalidated()) {
        return (null);
    }

    var iso = new DenseGridTrilinearImplicit(sdf.Grid, sdf.GridOrigin, sdf.CellSize);

    MarchingCubes c = new MarchingCubes();
    c.Implicit = iso;
    // Close offsets inward (negated iso value), other ops offset outward
    if (op_type == OperationTypes.Close) {
        c.IsoValue = -distance;
    } else {
        c.IsoValue = distance;
    }
    // NOTE(review): bounds come from the cached_sdf_bounds field, not the
    // sdf computed above -- presumably set by an earlier step; confirm the
    // two stay in sync.
    c.Bounds = cached_sdf_bounds;
    c.CubeSize = mesh_cell_size;
    c.Bounds.Expand(distance + 3 * c.CubeSize);
    c.RootMode = MarchingCubes.RootfindingModes.LerpSteps;
    c.RootModeSteps = 5;
    c.CancelF = is_invalidated;
    c.Generate();
    if (is_invalidated()) {
        return (null);
    }

    // quick cleanup pass on the marching-cubes output
    Reducer r = new Reducer(c.Mesh);
    r.FastCollapsePass(c.CubeSize * 0.5, 3, true);
    if (is_invalidated()) {
        return (null);
    }

    if (min_component_volume > 0) {
        MeshEditor.RemoveSmallComponents(c.Mesh, min_component_volume, min_component_volume);
    }
    if (is_invalidated()) {
        return (null);
    }

    return (c.Mesh);
}
/// <summary>
/// Captures the filter, validator, map, and reduce delegates for later use
/// by this (generated-style) async task wrapper.
/// </summary>
public _AsyncTask_965(ReplicationFilter filter, Validator validation, Mapper map, Reducer reduce)
{
    this.filter = filter;
    this.validation = validation;
    this.map = map;
    this.reduce = reduce;
}
/// <summary>
/// Factory helper: builds a Store from a reducer, an initial state and an
/// optional chain of middleware factories.
/// </summary>
public static Store<S, A> Store<S, A>(Reducer<S, A> reducer, S state, params Func<Func<S>, Action<A>, Func<Action<A>, Action<A>>>[] middleware)
{
    return new Store<S, A>(reducer, state, middleware);
}
/// <summary>
/// Application-level store: forwards the reducer, optional initial state,
/// and middleware straight to the base store.
/// </summary>
public AppStore(Reducer<AppState> reducer, AppState initialState = null, params IMiddleware[] middleware)
    : base(reducer, initialState, middleware)
{
}
/// <summary>
/// Blends the cached bounded SDFs of the inputs with a skeletal Ricci blend
/// and extracts the result surface with continuation marching cubes,
/// followed by a fast-collapse cleanup. Returns null if invalidated
/// mid-computation. Optionally profiles each phase.
/// </summary>
protected virtual DMesh3 compute_blend_bounded()
{
    bool profile = true;
    LocalProfiler p = null;
    if (profile) {
        p = new LocalProfiler();
        p.Start("sdf");
    }

    cache_input_sdfs_bounded();
    if (is_invalidated()) {
        return (null);
    }

    if (profile) {
        p.Stop("sdf");
        p.Start("mc");
    }

    // wrap each input SDF as a skeletal field so they can be Ricci-blended
    List<BoundedImplicitFunction3d> inputs = new List<BoundedImplicitFunction3d>();
    foreach (var sdf in cached_bounded_sdfs) {
        var dist_field = new DenseGridTrilinearImplicit(sdf);
        var skel_field = new DistanceFieldToSkeletalField() {
            DistanceField = dist_field, FalloffDistance = blend_falloff
        };
        inputs.Add(skel_field);
    }

    SkeletalRicciNaryBlend3d blend = new SkeletalRicciNaryBlend3d() {
        Children = inputs, BlendPower = this.blend_power
    };

    MarchingCubesPro c = new MarchingCubesPro();
    c.Implicit = blend;
    c.IsoValue = DistanceFieldToSkeletalField.ZeroIsocontour;
    c.Bounds = blend.Bounds();
    c.CubeSize = mesh_cell_size;
    c.Bounds.Expand(3 * c.CubeSize);
    c.RootMode = MarchingCubesPro.RootfindingModes.LerpSteps;
    c.RootModeSteps = 3;

    c.CancelF = is_invalidated;
    // seed marching cubes from the input meshes instead of a full grid scan
    //c.Generate();
    c.GenerateContinuation(input_mesh_seeds());
    if (is_invalidated()) {
        return (null);
    }

    if (profile) {
        p.Stop("mc");
        p.Start("reduce");
    }

    // extracted iso-surface comes out with flipped orientation here --
    // reverse to match the convention used downstream
    c.Mesh.ReverseOrientation();

    Reducer r = new Reducer(c.Mesh);
    r.FastCollapsePass(c.CubeSize / 2, 3, true);
    if (is_invalidated()) {
        return (null);
    }

    if (min_component_volume > 0) {
        MeshEditor.RemoveSmallComponents(c.Mesh, min_component_volume, min_component_volume);
    }
    if (is_invalidated()) {
        return (null);
    }

    if (profile) {
        p.Stop("reduce");
#if G3_USING_UNITY
        UnityEngine.Debug.Log("BLEND TIME: " + p.AllTimes());
#endif
    }

    return (c.Mesh);
}
/// <summary>
/// Demo driver: pivots row-oriented (region, product, item, value) records
/// into a table whose columns are the distinct (product, item) headers,
/// using Reducer.reduce as the Aggregate step, then prints the pivoted rows
/// and the elapsed time.
/// </summary>
static void Main(string[] args)
{
    var _staticColumnCount = 2;  // Columns that should not be pivoted
    var _dynamicColumnCount = 2; // Columns which needs to be pivoted to form header
    var _valueColumnCount = 1;   // Columns that represent Actual value

    // Distinct (product, item) pairs that become the pivoted header columns.
    List<List<string>> headerInfo = new List<List<string>>();
    headerInfo.Add(new List<string> { "Product Three", "Item Three" });
    headerInfo.Add(new List<string> { "Product Two", "Item Five" });
    headerInfo.Add(new List<string> { "Product Two", "Item Seven" });
    headerInfo.Add(new List<string> { "Product Two", "Item Nine" });
    headerInfo.Add(new List<string> { "Product One", "Item One" });
    headerInfo.Add(new List<string> { "Product One", "Item Two" });
    headerInfo.Add(new List<string> { "Product One", "Item Four" });
    headerInfo.Add(new List<string> { "Product One", "Item Six" });
    headerInfo.Add(new List<string> { "Product One", "Item Eight" });
    headerInfo.Add(new List<string> { "Product One", "Item Eleven" });
    headerInfo.Add(new List<string> { "Product Three", "Item Ten" });

    // Sample input rows: 2 static columns, 2 header columns, 1 value column.
    List<List<string>> data = new List<List<string>>();
    data.Add(new List<string> { "Global", "Europe", "Product One", "Item One", "579984.59" });
    data.Add(new List<string> { "Global", "North America", "Product One", "Item Two", "314586.73" });
    data.Add(new List<string> { "Global", "Asia", "Product One", "Item One", "62735.13" });
    data.Add(new List<string> { "Global", "Asia", "Product Two", "Item Five", "12619234.69" });
    data.Add(new List<string> { "Global", "North America", "Product Two", "Item Five", "8953713.39" });
    data.Add(new List<string> { "Global", "Europe", "Product One", "Item Two", "124267.4" });
    data.Add(new List<string> { "Global", "Asia", "Product One", "Item Four", "482338.49" });
    data.Add(new List<string> { "Global", "North America", "Product One", "Item Four", "809185.13" });
    data.Add(new List<string> { "Global", "Europe", "Product One", "Item Four", "233101" });
    data.Add(new List<string> { "Global", "Asia", "Product One", "Item Two", "120561.65" });
    data.Add(new List<string> { "Global", "North America", "Product One", "Item Six", "1517359.37" });
    data.Add(new List<string> { "Global", "Europe", "Product One", "Item Six", "382590.45" });
    data.Add(new List<string> { "Global", "North America", "Product One", "Item Eight", "661835.64" });
    data.Add(new List<string> { "Global", "Europe", "Product Three", "Item Three", "0" });
    data.Add(new List<string> { "Global", "Europe", "Product One", "Item Eight", "0" });
    data.Add(new List<string> { "Global", "Europe", "Product Two", "Item Five", "3478145.38" });
    data.Add(new List<string> { "Global", "Asia", "Product One", "Item Six", "0" });
    data.Add(new List<string> { "Global", "North America", "Product Two", "Item Seven", "4247059.97" });
    data.Add(new List<string> { "Global", "Asia", "Product Two", "Item Seven", "2163718.01" });
    data.Add(new List<string> { "Global", "Europe", "Product Two", "Item Seven", "2158782.48" });
    data.Add(new List<string> { "Global", "North America", "Product Two", "Item Nine", "72634.46" });
    data.Add(new List<string> { "Global", "Europe", "Product Two", "Item Nine", "127500" });
    data.Add(new List<string> { "Global", "North America", "Product One", "Item One", "110964.44" });
    data.Add(new List<string> { "Global", "Asia", "Product Three", "Item Ten", "2064.99" });
    data.Add(new List<string> { "Global", "Europe", "Product One", "Item Eleven", "0" });
    data.Add(new List<string> { "Global", "Asia", "Product Two", "Item Nine", "1250" });

    Reducer reducer = new Reducer();
    // fix: headerCount was assigned twice with the same value; the duplicate
    // assignment has been removed
    reducer.headerCount = headerInfo.Count;

    // each output row aggregates up to headerCount input rows
    var resultCount = (int)Math.Ceiling((double)data.Count / (double)reducer.headerCount);
    ValueArray[,] results = new ValueArray[resultCount, _staticColumnCount + reducer.headerCount];

    reducer.headerDict = new Dictionary<IEnumerable<string>, int>(new MyComparer());
    reducer.skipCols = _staticColumnCount;
    reducer.headerKeys = _dynamicColumnCount;
    reducer.rowDict = new Dictionary<IEnumerable<string>, int>(new MyComparer());
    reducer.currentLine = 0;
    reducer.valueCount = _valueColumnCount;

    // map each header (product, item) pair to its output column index
    for (int i = 0; i < reducer.headerCount; i++) {
        reducer.headerDict.Add(headerInfo[i], i);
    }

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();
    results = data.Aggregate(results, reducer.reduce);
    stopwatch.Stop();
    Console.WriteLine("millisecs: " + stopwatch.ElapsedMilliseconds);

    // print each pivoted row: its key followed by one bracketed cell per header
    for (int i = 0; i < resultCount; i++) {
        var curr_header = new string[reducer.headerCount];
        IEnumerable<string> curr_key = null;
        for (int j = 0; j < reducer.headerCount; j++) {
            curr_header[j] = "[" + String.Join(",", (results[i, reducer.skipCols + j]?.values) ?? new string[0]) + "]";
            // first non-null cell in the row supplies the row key
            curr_key = curr_key ?? (results[i, reducer.skipCols + j]?.row_keys);
        }
        Console.WriteLine(String.Join(",", curr_key) + ": " + String.Join(",", curr_header));
    }
    Console.ReadKey();
}
/// <summary>
/// Creates a realm store for the given initial state and root reducer,
/// registers it (and its builder) as singletons on the service collection,
/// and returns the created store.
/// </summary>
public static Store<TState> AddRealmStore<TState>(this IServiceCollection services, TState initialState, Reducer<TState> rootReducer)
{
    var store = new Store<TState>(initialState, rootReducer);
    services.AddSingleton<Store<TState>>(store);
    services.AddSingleton<IStoreBuilder<TState>, StoreBuilder<TState>>();
    return store;
}
/// <summary>
/// Prepares this task context for the map phase: creates the record reader
/// and mapper, and — when reducers exist — a combiner and partitioner, then
/// marks a task as assigned.
/// </summary>
public void RunMap(string inputSplit, int numReducers, bool pipedInputs)
{
    this.inputSplit = inputSplit;
    reader = factory.CreateRecordReader(this);
    if (reader != null) {
        valuee = new byte[0];
    }
    mapper = factory.CreateMapper(this);
    this.numReducers = numReducers;
    if (numReducers != 0) {
        // a combiner/partitioner is only useful when there is a reduce phase
        reducer = factory.CreateCombiner(this);
        partitioner = factory.CreatePartitioner(this);
    }
    if (reducer != null) {
        // spillSize is currently unused beyond this point -- the combine
        // writer it was meant for is still TODO below
        long spillSize = 100;
        if (jobConf.ContainsKey("io.sort.mb")) {
            spillSize = Convert.ToInt64(jobConf["io.sort.mb"]);
        }
        // TODO writer = new CombineRunner (
    }
    hasTask = true;
}
/// <summary>
/// Analytic variant of the blend: wraps lazily-cached CachingMeshSDFs as
/// implicit skeletal fields, blends them with a skeletal Ricci blend, and
/// extracts the surface with continuation marching cubes plus cleanup.
/// Returns null if invalidated mid-computation. Optionally profiles phases.
/// </summary>
protected virtual DMesh3 compute_blend_analytic()
{
    bool profile = true;
    LocalProfiler p = null;
    if (profile) {
        p = new LocalProfiler();
        p.Start("bvtree");
    }

    compute_cache_lazy_sdfs();
    if (is_invalidated()) {
        return (null);
    }

    if (profile) {
        p.Stop("bvtree");
        p.Start("mc");
    }

    // wrap each cached SDF as a skeletal field so they can be Ricci-blended
    List<BoundedImplicitFunction3d> inputs = new List<BoundedImplicitFunction3d>();
    foreach (CachingMeshSDF sdf in cached_lazy_sdfs) {
        var skel_field = new DistanceFieldToSkeletalField() {
            DistanceField = new CachingMeshSDFImplicit(sdf),
            FalloffDistance = blend_falloff
        };
        inputs.Add(skel_field);
    }

    SkeletalRicciNaryBlend3d blend = new SkeletalRicciNaryBlend3d() {
        Children = inputs,
        BlendPower = this.blend_power,
        FieldShift = -DistanceFieldToSkeletalField.ZeroIsocontour
    };

    AxisAlignedBox3d use_bounds = source_bounds;
    // NOTE(review): 'use_bounds' is copied before this Expand; if
    // AxisAlignedBox3d is a value type the falloff margin never reaches
    // 'use_bounds' (used for c.Bounds below) -- confirm whether Expand was
    // meant to be called on 'use_bounds' instead.
    source_bounds.Expand(blend_falloff);

    MarchingCubesPro c = lazy_mc;
    c.Implicit = blend;
    //c.IsoValue = DistanceFieldToSkeletalField.ZeroIsocontour;
    c.Bounds = use_bounds;
    c.CubeSize = mesh_cell_size;
    c.Bounds.Expand(3 * c.CubeSize);
    c.RootMode = MarchingCubesPro.RootfindingModes.LerpSteps;
    c.RootModeSteps = 3;
    //c.ParallelCompute = false;

    c.CancelF = is_invalidated;
    // seed marching cubes from the input meshes instead of a full grid scan
    c.GenerateContinuation(input_mesh_seeds());
    if (is_invalidated()) {
        return (null);
    }

    if (profile) {
        p.Stop("mc");
        p.Start("reduce");
    }

    // extracted iso-surface comes out with flipped orientation here --
    // reverse to match the convention used downstream
    c.Mesh.ReverseOrientation();

    Reducer r = new Reducer(c.Mesh);
    r.FastCollapsePass(c.CubeSize / 2, 3, true);
    if (is_invalidated()) {
        return (null);
    }

    if (min_component_volume > 0) {
        MeshEditor.RemoveSmallComponents(c.Mesh, min_component_volume, min_component_volume);
    }
    if (is_invalidated()) {
        return (null);
    }

    if (profile) {
        p.Stop("reduce");
#if G3_USING_UNITY
        UnityEngine.Debug.Log("ANALYTIC BLEND TIME: " + p.AllTimes());
#endif
    }

    return (c.Mesh);
}
/// <summary>
/// Creates a store with the given reducer; the optional initial state is
/// forwarded to the base store.
/// </summary>
public Store(Reducer<TAppState> reducer, TAppState initialState = default)
    : base(initialState)
{
    _reducer = reducer;
}
/// <summary>
/// Store that records history for time-travel debugging: the typed reducer
/// is adapted into a TimeMachineReducer, the initial state is wrapped in a
/// TimeMachineState, and each middleware is wrapped to match.
/// </summary>
public TimeMachineStore(Reducer<TState> reducer, TState initialState = default(TState), params Middleware<TState>[] middlewares)
    : base(new TimeMachineReducer((state, action) => reducer((TState)state, action)).Execute,
           new TimeMachineState(initialState),
           WrapMiddlewares(middlewares))
{
}
/// <summary>
/// Creates a state store with the given reducer and initial state, and an
/// empty subscriber list.
/// </summary>
public StateStore(Reducer<S, A> reducer, S initial)
{
    this.reducer = reducer;
    this.subscribers = new List<Subscriber<S>>();
    this.state = initial;
}
/// <summary>
/// Rewrites the async lambda body into the body of the runtime async state
/// machine's delegate: reduces custom nodes, lowers await-in-handler
/// constructs, eliminates variable aliasing, spills the evaluation stack
/// across await sites, rewrites awaits to the awaiter pattern, and wraps
/// everything in the builder's try/catch + SetResult scaffolding.
/// </summary>
/// <param name="stateVar">Hoisted state-machine state variable (__state).</param>
/// <param name="builderVar">Async method builder variable (__builder).</param>
/// <param name="stateMachineVar">Runtime async state machine variable.</param>
/// <param name="variables">Receives the variables that must be hoisted to the heap.</param>
/// <returns>An Action lambda implementing the state machine body.</returns>
private Expression RewriteBody(ParameterExpression stateVar, ParameterExpression builderVar, ParameterExpression stateMachineVar, out IEnumerable<ParameterExpression> variables)
{
    const int ExprCount = 1 /* local state var */ + 1 /* TryCatch */ + 2 /* state = -2; SetResult */ + 1 /* Label */;

    var locals = default(ParameterExpression[]);
    var exprs = default(Expression[]);

    var result = default(ParameterExpression);
    var ex = Expression.Parameter(typeof(Exception), "exception");

    var exit = Expression.Label("__exit");

    //
    // Keep a collection and a helper function to create variables that are hoisted to the heap
    // for use by await sites. Because only one await site can be active at a time, we can reuse
    // variables introduced for these, e.g. for awaiters of the same type.
    //
    // NB: We can replace the getVariable helper function with a local function in C# 7.0 if we
    //     get that feature.
    //
    var hoistedVars = new Dictionary<Type, ParameterExpression>();
    var getVariable = new Func<Type, string, ParameterExpression>((t, s) =>
    {
        if (!hoistedVars.TryGetValue(t, out ParameterExpression p))
        {
            p = Expression.Parameter(t, s + hoistedVars.Count);
            hoistedVars.Add(t, p);
        }

        return (p);
    });

    //
    // Some helpers to call AwaitOnCompleted on the async method builder for use by each await site in
    // the asynchronous code path, e.g.
    //
    //   if (!awaiter.IsCompleted)
    //   {
    //       __state = n;
    //       __builder.AwaitOnCompleted<AwaiterType, RuntimeAsyncStateMachine>(ref awaiter, ref __statemachine);
    //   }
    //
    // NB: We can replace the onCompletedFactory helper function with a local function in C# 7.0 if we
    //     get that feature.
    //
    // REVIEW: Do we have any option to call UnsafeAwaitOnCompleted at runtime, i.e. can we detect
    //         the cases where we can do this and can we do it wrt security restrictions on code
    //         that gets emitted dynamically?
    //
    var awaitOnCompletedMethod = builderVar.Type.GetMethod("AwaitOnCompleted", BindingFlags.Public | BindingFlags.Instance);
    var awaitOnCompletedArgs = new Type[] { default(Type), typeof(RuntimeAsyncStateMachine) };
    var onCompletedFactory = new Func<Expression, Expression>(awaiter =>
    {
        // close the open generic AwaitOnCompleted over this awaiter's type
        awaitOnCompletedArgs[0] = awaiter.Type;
        var awaitOnCompletedMethodClosed = awaitOnCompletedMethod.MakeGenericMethod(awaitOnCompletedArgs);
        return (Expression.Call(builderVar, awaitOnCompletedMethodClosed, awaiter, stateMachineVar));
    });

    //
    // First, reduce all nodes in the body except for await nodes. This makes subsequent rewrite
    // steps easier because we reduce to the known subset of LINQ nodes.
    //
    var reduced = Reducer.Reduce(Body);

    //
    // Next, rewrite exception handlers to synthetic equivalents where needed. This supports the
    // C# 6.0 features to await in catch and finally handlers (in addition to fault handlers in
    // order to support all LINQ nodes, which can be restricted if we want).
    //
    // This step also deals with pending branches out of exception handlers in order to properly
    // 'leave' protected regions and execute the branch after the exception handling construct.
    //
    var lowered = new CatchRewriter().Visit(reduced);
    lowered = new FinallyAndFaultRewriter().Visit(lowered);

    //
    // Next, eliminate any aliasing of variables that relies on the nesting of scoped nodes in
    // the LINQ APIs (e.g. nested blocks with reused ParmeterExpression nodes). We do this so we
    // don't have to worry about hoisting variables out of the async lambda body and causing the
    // meaning of the hoisted variable to change to another use of the same variable in a scoped
    // tree node higher up. This can happen during stack spilling, e.g.
    //
    //   {
    //       int x;               // @0
    //       {
    //           int x;           // @0 - same instance shadowing x in outer block
    //           F(x, await t);
    //       }
    //   }
    //
    //   ==>
    //
    //   int x;                   // @0 hoisted to heap by stack spilling
    //   () =>
    //   {
    //       int x;               // !!! the binding of x has now changed to the declaration
    //       __spill0 = x;        // !!! in the inner block
    //       __spill1 = await t;
    //       F(__spill0, __spill1);
    //   }
    //
    var aliasFree = AliasEliminator.Eliminate(lowered);

    //
    // Next, perform stack spilling in order to be able to pause the asynchronous method in the
    // middle of an expression without changing the left-to-right subexpression evaluation
    // semantics dictated by the C# language specification, e.g.
    //
    //   Console.ReadLine() + await Task.FromResult(Console.ReadLine)
    //
    // The first side-effect of reading from the console should happen before the second one
    // in the async operation.
    //
    var spilled = Spiller.Spill(aliasFree);

    //
    // Next, rewrite await expressions to the awaiter pattern with IsCompleted, OnCompleted,
    // and GetResult. This is where the heavy lifting (quite literally so) takes place and the
    // state machine is built. Other than rewriting await expressions, this step also takes care
    // of emitting the switch table for reentering the state machine, reentering nested try
    // blocks, and hoisting of locals. For more information, see AwaitRewriter.
    //
    // Note we need to introduce another local to keep the state of the async state machine in
    // order to deal with reentrancy of the async state machine via the OnCompleted call on an
    // awaiter while we're still exiting the state machine. This is a subtle race which we avoid
    // by making all decisions about jumps and state transitions based on a local copy of the
    // hoisted state variable used by the state machine:
    //
    //   int __localState = __state;
    //   switch (__localState)
    //   {
    //       ...
    //   }
    //
    // NB: Right now, locals used in await sites get hoisted to the heap eagerly rather than
    //     getting hoisted upon taking the asynchronous code path. This is an opportunity for
    //     future optimization, together with the use of a struct for the async state machine.
    //
    var localStateVar = Expression.Parameter(typeof(int), "__localState");
    var awaitRewriter = new AwaitRewriter(localStateVar, stateVar, getVariable, onCompletedFactory, exit);
    var rewrittenBody = awaitRewriter.Visit(spilled);

    //
    // Next, store the result of the rewritten body if the async method is non-void-returning.
    // Note this assignment will typically have a RHS which contains a non-void block expression
    // that originated from running the AwaitRewriter.
    //
    var newBody = rewrittenBody;

    if (Body.Type != typeof(void) && builderVar.Type.IsGenericType /* if not ATMB<T>, no result assignment needed */)
    {
        result = Expression.Parameter(Body.Type, "__result");
        newBody = Expression.Assign(result, rewrittenBody);
        locals = new[] { localStateVar, result };
    }
    else
    {
        locals = new[] { localStateVar };
    }

    exprs = new Expression[ExprCount];

    //
    // Next, we need to rewrite branching involving typed labels and percolate assignments in
    // order to avoid reduced await expressions causing branching into non-void expressions
    // which is not allowed in the lambda compiler. An example of this is shown in the comments
    // for AssignmentPercolator.
    //
    newBody = new TypedLabelRewriter().Visit(newBody);
    newBody = AssignmentPercolator.Percolate(newBody);

    var i = 0;

    //
    // Next, put the jump table to resume the async state machine on top of the rewritten body
    // returned from the AwaitRewriter. Note that the AwaitRewriter takes care of emitting the
    // nested resume jump tables for try statements, so we just have to stick the top-level
    // table around the body here. We don't do this in AwaitRewriter just to reduce the amount
    // of expression tree cloning incurred by TypedLabelRewriter and AssignmentPercolator given
    // that we know the switch tables don't contain any expressions that need such rewriting.
    //
    var resumeList = awaitRewriter.ResumeList;

    if (resumeList.Count > 0)
    {
        newBody = Expression.Block(
            typeof(void),
            Expression.Switch(stateVar, resumeList.ToArray()),
            newBody
        );
    }
    else
    {
        newBody = Helpers.CreateVoid(newBody);
    }

    //
    // int __localState = __state;
    //
    exprs[i++] = Expression.Assign(localStateVar, stateVar);

    //
    // try
    // {
    //     // body
    // }
    // catch (Exception ex)
    // {
    //     __state = -2;
    //     __builder.SetException(ex);
    //     goto __exit;
    // }
    //
    exprs[i++] =
        Expression.TryCatch(
            newBody,
            Expression.Catch(ex,
                Expression.Block(
                    Expression.Assign(stateVar, Helpers.CreateConstantInt32(-2)),
                    Expression.Call(builderVar, builderVar.Type.GetMethod("SetException"), ex),
                    Expression.Return(exit)
                )
            )
        );

    //
    // __state = -2;
    //
    exprs[i++] = Expression.Assign(stateVar, Helpers.CreateConstantInt32(-2));

    //
    // __builder.SetResult(__result);
    //
    if (result != null)
    {
        exprs[i++] = Expression.Call(builderVar, builderVar.Type.GetMethod("SetResult"), result);
    }
    else
    {
        exprs[i++] = Expression.Call(builderVar, builderVar.Type.GetMethod("SetResult"));
    }

    //
    // __exit:
    //     return;
    //
    exprs[i++] = Expression.Label(exit);

    //
    // Finally, create the Action with the rewritten async lambda body that gets passed to the
    // runtime async state machine and hoist any newly introduced variables for awaiters and
    // such to the outer scope in order to get them stored on the heap rather than the stack.
    //
    var body = Expression.Block(locals, exprs);
    var res = Expression.Lambda<Action>(body);

    variables = hoistedVars.Values.Concat(awaitRewriter.HoistedVariables);
    return (res);
}
public static void test_reduce_profiling()
{
    // Benchmark harness: loads a fixed high-resolution mesh, then runs the
    // Reducer Niters times and reports per-phase profiler output plus the
    // average wall-clock time of the "reduce" phase.
    LocalProfiler p = new LocalProfiler();

    p.Start("load");
    DMesh3 loadMesh = TestUtil.LoadTestMesh("c:\\scratch\\ZentrigDoo_Hires_Upper.stl");
    System.Console.WriteLine("Loaded...");

    p.StopAllAndStartNew("check");
    // validity check intentionally skipped (expensive): mesh.CheckValidity();
    System.Console.WriteLine("Checked...");

    double time_ticks = 0;
    int Niters = 10;
    DMesh3 mesh = null;
    for (int k = 0; k < Niters; ++k) {
        // Reduce a fresh copy each iteration so every run starts from the
        // same input mesh.
        mesh = new DMesh3(loadMesh);
        int N = 100000;
        System.Console.WriteLine("Reducing from {0} to {1}...", mesh.TriangleCount, N);

        BlockTimer reduceT = p.StopAllAndStartNew("reduce");
        Reducer r = new Reducer(mesh);
        r.ENABLE_PROFILING = true;
        r.ReduceToTriangleCount(N);
        p.Stop("reduce");

        time_ticks += reduceT.Watch.Elapsed.Ticks;
        System.Console.WriteLine(p.AllTimes());

        // Collect between runs so garbage left over from one iteration does
        // not distort the timing of the next.
        GC.Collect();
    }

    // BUG FIX: the averaged tick count was previously cast to int, which
    // truncates/overflows once the total exceeds int.MaxValue ticks
    // (~3.5 minutes). TimeSpan's ticks constructor takes a long.
    TimeSpan total = new TimeSpan((long)(time_ticks / (double)Niters));
    System.Console.WriteLine("AVERAGE: {0}", string.Format("{0:ss}.{0:ffffff}", total));

    TestUtil.WriteDebugMesh(mesh, "__REDUCED.obj");
}
/// <summary>
/// Creates a store driven by <paramref name="rootReducer"/> and seeds the
/// initial state by dispatching an <see cref="InitStoreAction"/>.
/// </summary>
/// <param name="rootReducer">Reducer invoked for every dispatched action.</param>
public BasicStore(Reducer <State> rootReducer)
{
    this.rootReducer = rootReducer;
    // Run the reducer once against the default state so the store exposes a
    // fully initialized state tree from the start.
    this.state = this.rootReducer(this.state, new InitStoreAction());
}
public static void test_reduce_constraints_fixedverts()
{
    // Build a stretched capped cylinder as the test input.
    int sliceCount = 128;
    DMesh3 mesh = TestUtil.MakeCappedCylinder(false, sliceCount);
    MeshUtil.ScaleMesh(mesh, Frame3f.Identity, new Vector3f(1, 2, 1));
    mesh.CheckValidity();
    AxisAlignedBox3d bounds = mesh.CachedBounds;

    // Projection target: an AABB tree built over an untouched copy of the
    // input, so reduced vertices can be projected back onto the original
    // surface.
    DMesh3 targetCopy = new DMesh3(mesh);
    targetCopy.CheckValidity();
    DMeshAABBTree3 spatial = new DMeshAABBTree3(targetCopy);
    spatial.Build();
    MeshProjectionTarget projTarget = new MeshProjectionTarget() { Mesh = targetCopy, Spatial = spatial };

    if (WriteDebugMeshes) {
        TestUtil.WriteTestOutputMesh(mesh, "reduce_fixed_constraints_test_before.obj");
    }

    // Constrain every edge whose opening angle exceeds 30 degrees, and pin
    // its endpoints. Vertex set id 1 = above mid-height, 2 = below.
    MeshConstraints constraints = new MeshConstraints();
    EdgeRefineFlags edgeFlags = EdgeRefineFlags.PreserveTopology;
    foreach (int eid in mesh.EdgeIndices()) {
        double openingAngle = MeshUtil.OpeningAngleD(mesh, eid);
        if (openingAngle > 30.0f) {
            constraints.SetOrUpdateEdgeConstraint(eid, new EdgeConstraint(edgeFlags) { TrackingSetID = 1 });
            Index2i ev = mesh.GetEdgeV(eid);
            int setA = (mesh.GetVertex(ev[0]).y > bounds.Center.y) ? 1 : 2;
            int setB = (mesh.GetVertex(ev[1]).y > bounds.Center.y) ? 1 : 2;
            constraints.SetOrUpdateVertexConstraint(ev[0], new VertexConstraint(true, setA));
            constraints.SetOrUpdateVertexConstraint(ev[1], new VertexConstraint(true, setB));
        }
    }

    // Reduce aggressively; constraints must keep the sharp edges intact.
    Reducer reducer = new Reducer(mesh);
    reducer.SetExternalConstraints(constraints);
    reducer.SetProjectionTarget(projTarget);
    reducer.ReduceToTriangleCount(50);
    mesh.CheckValidity();

    if (WriteDebugMeshes) {
        TestUtil.WriteTestOutputMesh(mesh, "reduce_fixed_constraints_test_after.obj");
    }
}
/// <summary>
/// Creates a production for the given nonterminal with the reduction
/// callback to run when the production is matched.
/// </summary>
/// <param name="nonterminalType">Identifier of the nonterminal this production derives.</param>
/// <param name="reduction">Reducer invoked on a successful match (may be injected late, hence [Delayed]).</param>
public Production(int nonterminalType, [Delayed] Reducer reduction)
{
    NonterminalType = nonterminalType;
    Reduction = reduction;
}
/// <summary>
/// Creates a reactive store on top of the base store, exposing state
/// changes as an observable stream.
/// </summary>
/// <param name="rootReducer">Reducer forwarded to the base store.</param>
public RxStore(Reducer <TState> rootReducer)
    : base(rootReducer)
{
    // BehaviorSubject is seeded with the current state so late subscribers
    // immediately receive a value on subscription.
    _stateChanged = new BehaviorSubject <TState>(GetState());
    SubscribeToStateChange();
}
/// <summary>Multiply or divide this unit of measure by another one.</summary>
///
/// <param name="other">The multiplicand, or the divisor when <paramref name="invert"/> is true</param>
/// <param name="invert">False for multiplication, true for division</param>
///
/// <returns>Product or quotient unit of measure</returns>
///
private UnitOfMeasure MultiplyOrDivide(UnitOfMeasure other, bool invert)
{
    if (other == null) {
        throw new Exception(MeasurementSystem.GetMessage("unit.cannot.be.null"));
    }

    // NOTE(review): presumably rejects units with an offset, which cannot
    // participate in products/quotients — confirm against CheckOffset.
    CheckOffset(this);
    CheckOffset(other);

    // this base symbol map
    Reducer thisReducer = GetReducer();
    Dictionary <UnitOfMeasure, int> thisMap = thisReducer.Terms;

    // other base symbol map
    Reducer otherReducer = other.GetReducer();
    Dictionary <UnitOfMeasure, int> otherMap = otherReducer.Terms;

    // create a map of the unit of measure powers
    Dictionary <UnitOfMeasure, int> resultMap = new Dictionary <UnitOfMeasure, int>();

    // iterate over the multiplier's unit map
    foreach (KeyValuePair <UnitOfMeasure, int> thisEntry in thisMap) {
        UnitOfMeasure thisUOM = thisEntry.Key;
        int thisPower = thisEntry.Value;

        if (otherMap.TryGetValue(thisUOM, out int otherPower)) {
            if (!invert) {
                // add to multiplier's power
                thisPower += otherPower;
            } else {
                // subtract from dividend's power
                thisPower -= otherPower;
            }

            // remove multiplicand or divisor UOM
            // NOTE(review): this mutates the dictionary returned by
            // other.GetReducer().Terms — assumes Terms is a fresh map per
            // call rather than cached state; verify against GetReducer().
            otherMap.Remove(thisUOM);
        }

        // powers that cancel to zero are dropped from the result
        if (thisPower != 0) {
            resultMap[thisUOM] = thisPower;
        }
    }

    // add any remaining multiplicand terms and invert any remaining divisor
    // terms
    foreach (KeyValuePair <UnitOfMeasure, int> otherEntry in otherMap) {
        UnitOfMeasure otherUOM = otherEntry.Key;
        int otherPower = otherEntry.Value;

        if (!invert) {
            resultMap[otherUOM] = otherPower;
        } else {
            resultMap[otherUOM] = -otherPower;
        }
    }

    // get the base symbol and possibly base UOM
    Reducer resultReducer = new Reducer();
    resultReducer.Terms = resultMap;

    // product or quotient
    UnitOfMeasure result = new UnitOfMeasure();
    if (!invert) {
        result.SetProductUnits(this, other);
    } else {
        result.SetQuotientUnits(this, other);
    }

    if (!invert) {
        result.Symbol = GenerateProductSymbol(result.GetMultiplier(), result.GetMultiplicand());
    } else {
        result.Symbol = GenerateQuotientSymbol(result.GetDividend(), result.GetDivisor());
    }

    // constrain to a maximum length; fall back to a generated symbol
    if (result.Symbol.Length > MAX_SYMBOL_LENGTH) {
        result.Symbol = GenerateIntermediateSymbol();
    }

    String baseSymbol = resultReducer.BuildBaseString();
    UnitOfMeasure baseUOM = MeasurementSystem.GetSystem().GetBaseUOM(baseSymbol);

    if (baseUOM != null) {
        // there is a conversion to the base UOM: the combined scaling
        // factor is the product (or quotient) of the operand factors
        double thisFactor = thisReducer.MapScalingFactor;
        double otherFactor = otherReducer.MapScalingFactor;

        double resultFactor = 0;
        if (!invert) {
            resultFactor = thisFactor * otherFactor;
        } else {
            resultFactor = thisFactor / otherFactor;
        }
        result.ScalingFactor = resultFactor;
        result.AbscissaUnit = baseUOM;
        result.UOMType = baseUOM.UOMType;
    }

    return(result);
}
/// <summary>
/// Registers <paramref name="reducer"/> to handle actions of type
/// <typeparamref name="A"/>, replacing any reducer previously registered
/// for that action type.
/// </summary>
/// <param name="reducer">Typed reducer to invoke for actions of type <typeparamref name="A"/>.</param>
public void AddReducer <A>(Reducer <T, A> reducer)
{
    // Wrap the typed reducer in an untyped delegate so it can be stored in
    // a dictionary keyed by action type; the action is downcast on dispatch.
    _reducerDictionary[typeof(A)] = (s, a) => reducer(s, (A)a);
}
/// <summary>
/// Initializes a new instance of the <see cref="StoreBuilder{TState}"/> class.
/// </summary>
/// <param name="reducer">
/// A reducing function that returns the next state tree.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="reducer"/> is null.
/// </exception>
public StoreBuilder(Reducer <TState> reducer)
{
    if (reducer is null)
    {
        throw new ArgumentNullException(nameof(reducer));
    }

    this.reducer = reducer;
}
/// <summary>
/// Unregisters the reducer associated with action type <typeparamref name="A"/>.
/// </summary>
/// <param name="reducer">
/// Unused: removal is keyed purely on the action type, so whatever reducer is
/// currently registered for <typeparamref name="A"/> is removed even if it is
/// not this instance. NOTE(review): confirm this is intended — a caller passing
/// a different reducer will still clear the registered one.
/// </param>
public void RemoveReducer <A>(Reducer <T, A> reducer)
{
    // Dictionary.Remove is a no-op (returns false) when nothing is
    // registered for typeof(A); the return value is ignored here.
    this._reducerDictionary.Remove(typeof(A));
}
/// <summary>
/// Computes the shelled result mesh from a signed distance field of the input
/// mesh. Falls back to the unsigned variant for open meshes, caches the SDF
/// across invocations, and polygonizes the shell/offset iso-surface with
/// marching cubes. Bails out (leaving state unchanged) whenever
/// is_invalidated() reports cancellation.
/// </summary>
protected virtual void compute_shell_distancefield()
{
    // A signed field requires a closed input; open meshes take the
    // unsigned path instead.
    if (cached_is_closed == false) {
        compute_shell_distancefield_unsigned();
        return;
    }

    double offset_distance = shell_thickness;
    Interval1d shell_range = new Interval1d(0, offset_distance);
    if (shell_direction == ShellDirections.Symmetric) {
        // shell straddles the surface: half the thickness on each side
        shell_range = new Interval1d(-offset_distance / 2, offset_distance / 2);
    } else if (shell_direction == ShellDirections.Inner) {
        // inner shell: negate the offset so the iso-surface moves inward
        shell_range = new Interval1d(-offset_distance, 0);
        offset_distance = -offset_distance;
    }

    // (Re)compute the SDF only if there is none cached, the cached one was
    // built for a smaller offset, or the grid resolution changed.
    if (cached_sdf == null ||
        shell_thickness > cached_sdf_max_offset ||
        grid_cell_size != cached_sdf.CellSize) {
        DMesh3 meshIn = MeshSource.GetDMeshUnsafe();
        int exact_cells = (int)((shell_thickness) / grid_cell_size) + 1;

        // only use spatial DS if we are computing enough cells
        DMeshAABBTree3 use_spatial = GenerateClosedMeshOp.MeshSDFShouldUseSpatial(
            input_spatial, exact_cells, grid_cell_size, input_mesh_edge_stats.z);
        MeshSignedDistanceGrid sdf = new MeshSignedDistanceGrid(meshIn, grid_cell_size, use_spatial) {
            ExactBandWidth = exact_cells
        };
        if (use_spatial != null) {
            // narrow-band flood fill only needs distances near the shell
            sdf.NarrowBandMaxDistance = shell_thickness + grid_cell_size;
            sdf.ComputeMode = MeshSignedDistanceGrid.ComputeModes.NarrowBand_SpatialFloodFill;
        }

        sdf.CancelF = is_invalidated;
        sdf.Compute();
        if (is_invalidated()) {
            return;
        }
        cached_sdf = sdf;
        cached_sdf_max_offset = shell_thickness;
        cached_sdf_bounds = meshIn.CachedBounds;
    }

    var iso = new DenseGridTrilinearImplicit(cached_sdf.Grid, cached_sdf.GridOrigin, cached_sdf.CellSize);

    // Symmetric shells use a two-sided band around the surface; Inner/Outer
    // use a single offset surface.
    BoundedImplicitFunction3d shell_field = (shell_direction == ShellDirections.Symmetric) ?
        (BoundedImplicitFunction3d) new ImplicitShell3d() { A = iso, Inside = shell_range } :
        (BoundedImplicitFunction3d) new ImplicitOffset3d() { A = iso, Offset = offset_distance };

    MarchingCubes c = new MarchingCubes();
    c.Implicit = shell_field;
    c.IsoValue = 0;
    c.Bounds = cached_sdf_bounds;
    c.CubeSize = mesh_cell_size;
    // NOTE(review): for Inner shells offset_distance is negative here, so
    // this expansion is smaller than for Outer — confirm this is intended.
    c.Bounds.Expand(offset_distance + 3 * c.CubeSize);
    c.RootMode = MarchingCubes.RootfindingModes.LerpSteps;
    c.RootModeSteps = 5;
    c.CancelF = is_invalidated;
    c.Generate();
    if (is_invalidated()) {
        return;
    }

    // quick decimation pass to strip marching-cubes oversampling
    Reducer r = new Reducer(c.Mesh);
    r.FastCollapsePass(c.CubeSize * 0.5, 3, true);
    if (is_invalidated()) {
        return;
    }

    if (min_component_volume > 0) {
        MeshEditor.RemoveSmallComponents(c.Mesh, min_component_volume, min_component_volume);
    }
    if (is_invalidated()) {
        return;
    }

    // One-sided shells are open surfaces; tag them so downstream consumers
    // know the result is not a closed solid.
    if (shell_surface_only) {
        if (shell_direction == ShellDirections.Inner || shell_direction == ShellDirections.Outer) {
            c.Mesh.AttachMetadata("is_partial", new object());
        }
    }

    ResultMesh = c.Mesh;
}
/// <summary>
/// Returns <paramref name="reducer"/> unchanged. The <paramref name="prototype"/>
/// argument is never used; it appears to exist only so the compiler can infer
/// <typeparamref name="TContext"/> and <typeparamref name="TModel"/> at the
/// call site.
/// </summary>
public static Reducer <TContext, TModel> NewReducer <TContext, TModel>(Reducer <TContext, TModel> prototype, Reducer <TContext, TModel> reducer)
{
    return reducer;
}
/// <summary>
/// Visits <paramref name="node"/> with a <see cref="Reducer"/> constructed for
/// the given provider modes, writing any notes into
/// <paramref name="annotations"/>, and returns the rewritten tree.
/// </summary>
internal static SqlNode Reduce(SqlNode node, SqlNodeAnnotations annotations, Enum[] providerModesWithIncompatibilities)
{
    Reducer reducer = new Reducer(providerModesWithIncompatibilities);
    reducer.Annotations = annotations;
    return reducer.Visit(node);
}
/// <summary>
/// Visits <paramref name="node"/> with a default-constructed
/// <see cref="Reducer"/>, writing any notes into
/// <paramref name="annotations"/>, and returns the rewritten tree.
/// </summary>
internal static SqlNode Reduce(SqlNode node, SqlNodeAnnotations annotations)
{
    // Consistency: use an object initializer like the provider-mode-aware
    // overload, instead of a separate property assignment.
    Reducer r = new Reducer() { Annotations = annotations };
    return r.Visit(node);
}
/// <summary>
/// Entry point: drains the key stream, invoking the reducer once per key with
/// an iterator over that key's values.
/// </summary>
public static void Main()
{
    Reducer red = new Reducer();
    // Position the input cursor on the first key before the loop.
    advance();
    while (!endInput()) {
        // NOTE(review): nothing in this loop body visibly calls advance();
        // presumably consuming the WmrIterator moves the underlying input
        // past nextKey — confirm, otherwise this loop never terminates.
        red.reduce(nextKey, new WmrIterator(nextKey));
    }
}