/** Scans every feature in the store and accumulates the full data extent. */
private void buildIndex()
{
    for (FeatureCursor cursor = store.getCursor(); cursor.hasNext(); )
    {
        Feature feature = cursor.next();
        extent.expandToInclude(feature.getExtent());
    }
}
/**
 * Copy constructor
 */
public FeatureCursor(FeatureCursor rhs)
{
    this.oids = rhs.oids;
    this.store = rhs.store;
    this.iter = rhs.iter;
    this.search_extent = rhs.search_extent;
    this.match_exactly = rhs.match_exactly;
    this.prefetch_size = rhs.prefetch_size;
    this.prefetched_results = rhs.prefetched_results;
    this.last_result = rhs.last_result;
    this.at_bof = rhs.at_bof;
}
public Node compile(FeatureLayer layer, FeatureCursor cursor, FilterGraph graph)
{
    // Build a filter environment seeded with the layer's area of interest,
    // its spatial reference, and the terrain the features will drape over.
    FilterEnv env = getSession().createFilterEnv();
    env.setExtent(getAreaOfInterest(layer));
    env.setInputSRS(layer.getSRS());
    env.setTerrainNode(terrain);
    env.setTerrainSRS(terrain_srs);
    env.setTerrainReadCallback(read_cb);

    // Run the filter graph; on success the compiled nodes come back in "output".
    osg.Group output;
    FilterGraphResult r = graph.computeNodes(cursor, env, out output);
    return r.isOK() ? output : null;
}
public FeatureCursor getCursor(GeoExtent query_extent, bool match_exactly)
{
    // Brute-force spatial query: collect the OIDs of all features whose
    // extents intersect the query extent, then hand them to a new cursor.
    FeatureOIDList oids = new FeatureOIDList();
    for (FeatureCursor cursor = store.getCursor(); cursor.hasNext(); )
    {
        Feature feature = cursor.next();
        GeoExtent f_extent = feature.getExtent();
        if (f_extent.intersects(query_extent))
        {
            oids.Add(feature.getOID());
        }
    }
    return new FeatureCursor(oids, store, query_extent, match_exactly);
}
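// Illustrative usage sketch (not part of the original source): drives the
// spatial-query cursor produced by getCursor() above. Passing
// match_exactly = false requests only the cheap bounding-box intersection test.
private int countFeaturesIntersecting(GeoExtent query_extent)
{
    int count = 0;
    for (FeatureCursor cursor = getCursor(query_extent, false); cursor.hasNext(); )
    {
        cursor.next();
        count++;
    }
    return count;
}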
/**
 * Compiles a feature layer against each filter-graph range and builds an LOD.
 *
 * @param layer
 *      Feature layer to compile
 * @param cursor
 *      Iterator over the collection of features to compile
 * @param output_file
 *      If getLocalizeResources() == true, the compiler will localize
 *      external resources to the directory containing the specified
 *      file. This compiler does not actually write anything to the
 *      named file, however.
 * @return
 *      Resulting scene graph, or null upon error
 */
public Node compile(FeatureLayer layer, FeatureCursor cursor, string output_file)
{
    osg.Node result = null;

    if (layer == null)
    {
        //TODO osgGIS.notify(osg.WARN) << "Illegal null feature layer" << std.endl;
        return null;
    }

    osg.LOD lod = new osg.LOD();

    if (getFadeLODs())
    {
        FadeHelper.enableFading(lod.getOrCreateStateSet());
    }

    // Compile each filter graph into its own LOD range.
    foreach (var i in graph_ranges)
    {
        osg.Node range = compile(layer, cursor, i.graph);
        if (range != null)
        {
            lod.addChild(range, i.min_range, i.max_range);

            if (getFadeLODs())
            {
                FadeHelper.setOuterFadeDistance(i.max_range, range.getOrCreateStateSet());
                FadeHelper.setInnerFadeDistance(i.max_range - 0.2f * (i.max_range - i.min_range), range.getOrCreateStateSet());
            }
        }
    }

    if (GeomUtils.hasDrawables(lod))
    {
        result = getOverlay() ? convertToOverlay(lod) : lod;

        // Optimize the compiled scene graph.
        osgUtil.Optimizer opt = new osgUtil.Optimizer();
        opt.optimize(result,
                     osgUtil.Optimizer.SPATIALIZE_GROUPS |
                     osgUtil.Optimizer.STATIC_OBJECT_DETECTION |
                     osgUtil.Optimizer.SHARE_DUPLICATE_STATE);

        // Optional explicit render bin, with the depth function forced to ALWAYS.
        if (getRenderOrder() >= 0)
        {
            string bin_name = result.getOrCreateStateSet().getBinName();
            result.getOrCreateStateSet().setRenderBinDetails(getRenderOrder(), bin_name);
            result.getOrCreateStateSet().setAttributeAndModes(new osg.Depth(osg.Depth.ALWAYS), osg.StateAttribute.ON);
        }

        localizeResourceReferences(result);

        if (output_file.Length > 0)
        {
            localizeResources(osgDB.getFilePath(output_file));
        }
    }

    return result;
}
/**
 * Compiles a feature layer.
 *
 * @param layer
 *      Feature layer to compile
 * @param cursor
 *      Iterator over the collection of features to compile
 * @return
 *      Resulting scene graph, or null upon error
 */
public Node compile(FeatureLayer layer, FeatureCursor cursor)
{
    // Forwards to the full overload with no output file, so resources are
    // not localized to any output directory.
    return compile(layer, cursor, "");
}
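// Illustrative usage sketch (not part of the original source): compiles a layer's
// full feature set with the overload above and attaches the result to a
// caller-supplied parent group when compilation succeeds.
public bool compileInto(FeatureLayer layer, osg.Group parent)
{
    if (layer == null || parent == null)
        return false;

    // The layer's default cursor iterates over every feature; no output file
    // is given, so no resource localization takes place.
    Node result = compile(layer, layer.getCursor());
    if (result == null)
        return false;

    parent.addChild(result);
    return true;
}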
public Node compile(FeatureLayer layer, string output_file)
{
    FeatureCursor cursor = layer.getCursor();
    return compile(layer, cursor, output_file);
}
/**
 * Runs the graph to generate a feature store. The graph should only
 * contain FeatureFilter and CollectionFilter type filters.
 *
 * Executes the graph by passing features to the first filter in the
 * chain. That filter will process the data, pass the results along to
 * the next filter, and so on until completion.
 *
 * @param cursor
 *      Source cursor for features to process
 * @param env
 *      Contextual compilation environment
 * @param output_uri
 *      URI of a feature store to create and in which to store the results
 * @return
 *      A structure describing the result of the compilation.
 */
public FilterGraphResult computeFeatureStore(FeatureCursor cursor, FilterEnv env, string output_uri)
{
#if TODO
    bool ok = false;

    // first build the filter state chain, validating that there are ONLY feature filters
    // present. No other filter type is permitted when generating a feature store.
    FilterState first = null;
    foreach (Filter i in filter_prototypes)
    {
        Filter filter = i;
        if (!(filter is FeatureFilter))
        {
            //TODO osgGIS.notify(osg.WARN) << "Error: illegal filter of type \"" << filter.getFilterType() << "\" in graph. Only feature filters are allowed." << std.endl;
            return FilterGraphResult.error("Illegal first filter type in filter graph");
        }

        FilterState next_state = filter.newState();
        if (first == null)
        {
            first = next_state;
        }
        else
        {
            first.appendState(next_state);
        }
    }

    if (first == null)
    {
        //TODO osgGIS.notify(osg.WARN) << "Error: filter graph \"" << getName() << "\" is empty." << std.endl;
        return FilterGraphResult.error("Illegal: empty filter graph");
    }

    // next, append a WriteFeatures filter that will generate the output
    // feature store.
    WriteFeaturesFilter writer = new WriteFeaturesFilter();
    writer.setOutputURI(output_uri);
    //writer.setAppendMode( WriteFeaturesFilter.OVERWRITE );

    FilterState output_state = writer.newState();
    first.appendState(output_state);

    // now run the graph.
    FilterStateResult state_result = new FilterStateResult();
    int count = 0;
    osg.Timer_t start = osg.Timer.instance().tick();

    env.setOutputSRS(env.getInputSRS());

    FeatureFilterState state = (FeatureFilterState)first;
    while (state_result.isOK() && cursor.hasNext())
    {
        state.push(cursor.next());
        state_result = state.traverse(env);
        count++;
    }
    if (state_result.isOK())
    {
        state_result = state.signalCheckpoint();
    }

    osg.Timer_t end = osg.Timer.instance().tick();
    double dur = osg.Timer.instance().delta_s(start, end);

    if (state_result.isOK())
    {
        return FilterGraphResult.ok(output_state.getLastKnownFilterEnv());
    }
    else
    {
        return FilterGraphResult.error("Filter graph failed to compute feature store");
    }
#endif
    throw new NotImplementedException();
}
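// Illustrative usage sketch (not part of the original source): shows the intended
// calling pattern for computeFeatureStore() above. In this port the method body is
// still wrapped in "#if TODO" and currently throws NotImplementedException, so this
// sketch documents intent rather than working behavior.
public bool exportToFeatureStore(FeatureCursor cursor, FilterEnv env, string output_uri)
{
    // Only feature (and collection) filters are expected in the graph when
    // generating a feature store; node-producing filters are rejected.
    FilterGraphResult r = computeFeatureStore(cursor, env, output_uri);
    return r.isOK();
}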
/**
 * Runs the graph to generate a scene graph.
 *
 * Executes the graph by passing features to the first filter in the
 * chain. That filter will process the data, pass the results along to
 * the next filter, and so on until completion.
 *
 * @param cursor
 *      Source cursor for features to process
 * @param env
 *      Contextual compilation environment
 * @param output
 *      Receives a group node that, upon success, contains the resulting
 *      nodes of the compiled scene as its children
 * @return
 *      A structure describing the result of the compilation.
 */
public FilterGraphResult computeNodes(FeatureCursor cursor, FilterEnv env, out osg.Group output)
{
    FilterStateResult state_result = new FilterStateResult();
    output = null;

    NodeFilterState output_state = null;

    // first build a new state chain corresponding to our filter prototype chain.
    FilterState first = null;
    foreach (Filter i in filter_prototypes)
    {
        FilterState next_state = i.newState();
        if (first == null)
        {
            first = next_state;
        }
        else
        {
            first.appendState(next_state);
        }

        if (next_state is NodeFilterState)
        {
            output_state = (NodeFilterState)next_state;
        }
    }

    // now traverse the states.
    if (first != null)
    {
        int count = 0;
        osg.Timer_t start = osg.Timer.instance().tick();

        env.setOutputSRS(env.getInputSRS());

        if (first is FeatureFilterState)
        {
            FeatureFilterState state = (FeatureFilterState)first;
            while (state_result.isOK() && cursor.hasNext())
            {
                state.push(wind(cursor.next()));
                state_result = state.traverse(env);
                count++;
            }
            if (state_result.isOK())
            {
                state_result = state.signalCheckpoint();
            }
        }
        else if (first is FragmentFilterState)
        {
            FragmentFilterState state = (FragmentFilterState)first;
            while (state_result.isOK() && cursor.hasNext())
            {
                state.push(wind(cursor.next()));
                state_result = state.traverse(env);
                count++;
            }
            if (state_result.isOK())
            {
                state_result = state.signalCheckpoint();
            }
        }
        else if (first is CollectionFilterState)
        {
            CollectionFilterState state = (CollectionFilterState)first;
            while (state_result.isOK() && cursor.hasNext())
            {
                state.push(wind(cursor.next()));
                state_result = state.traverse(env);
                count++;
            }
            if (state_result.isOK())
            {
                state_result = state.signalCheckpoint();
            }
        }

        osg.Timer_t end = osg.Timer.instance().tick();
        double dur = osg.Timer.instance().delta_s(start, end);
        //osgGIS.notify( osg.ALWAYS ) << std.endl << "Time = " << dur << " s; Per Feature Avg = " << (dur/(double)count) << " s" << std.endl;
    }
    else
    {
        state_result.set(FilterStateResult.Status.STATUS_NODATA);
    }

    // collect the nodes produced by the final node-filter state, if any.
    if (output_state != null && state_result.hasData())
    {
        output = new osg.Group();
        foreach (AttributedNode i in output_state.getOutput())
        {
            output.addChild(i.getNode());
        }
    }

    if (state_result.isOK())
    {
        return FilterGraphResult.ok(output_state.getLastKnownFilterEnv());
    }
    else
    {
        return FilterGraphResult.error("Filter graph failed to compute any nodes");
    }
}
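// Illustrative usage sketch (not part of the original source): runs this graph
// directly against a cursor and a caller-prepared FilterEnv, mirroring how the
// layer compiler above invokes computeNodes().
public osg.Group runToGroup(FeatureCursor cursor, FilterEnv env)
{
    osg.Group output;
    FilterGraphResult r = computeNodes(cursor, env, out output);

    // Return null on failure so the caller can tell "no output" from an empty group.
    return r.isOK() ? output : null;
}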