public Node compile(FeatureLayer layer, FeatureCursor cursor, FilterGraph graph)
{
    // Build a filter environment seeded with the layer's extent, SRS, and terrain data.
    FilterEnv env = getSession().createFilterEnv();
    env.setExtent(getAreaOfInterest(layer));
    env.setInputSRS(layer.getSRS());
    env.setTerrainNode(terrain);
    env.setTerrainSRS(terrain_srs);
    env.setTerrainReadCallback(read_cb);

    // Run the graph; on success the compiled nodes come back as a single group.
    osg.Group output;
    FilterGraphResult r = graph.computeNodes(cursor, env, out output);
    return r.isOK() ? output : null;
}
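// Usage sketch (illustrative only, not part of the original source): given a
// FeatureLayer, a cursor over its features, and a FilterGraph, a caller could
// compile them into a scene graph node roughly like this. The names "layer",
// "cursor", "graph", and "root" are hypothetical placeholders.
//
//     Node node = compile(layer, cursor, graph);
//     if (node != null)
//         root.addChild(node);   // attach the compiled subgraph to the scene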
/**
 * Runs the graph to generate a feature store. The graph should only
 * contain FeatureFilter and CollectionFilter type filters.
 *
 * Executes the graph by passing features to the first filter in the
 * chain. That filter will process the data, pass the results along to
 * the next filter, and so on until completion.
 *
 * @param cursor
 *      Source cursor for features to process
 * @param env
 *      Contextual compilation environment
 * @param output_uri
 *      URI of a feature store to create and in which to store the results
 * @return
 *      A structure describing the result of the compilation.
 */
public FilterGraphResult computeFeatureStore(FeatureCursor cursor, FilterEnv env, string output_uri)
{
#if TODO
    bool ok = false;

    // First build the filter state chain, validating that there are ONLY feature filters
    // present. No other filter type is permitted when generating a feature store.
    FilterState first = null;
    foreach (Filter i in filter_prototypes)
    {
        Filter filter = i;
        if (!(filter is FeatureFilter))
        {
            // TODO: log warning - illegal filter of type filter.getFilterType() in graph; only FeatureFilters are allowed.
            return FilterGraphResult.error("Illegal first filter type in filter graph");
        }

        FilterState next_state = filter.newState();
        if (first == null)
        {
            first = next_state;
        }
        else
        {
            first.appendState(next_state);
        }
    }

    if (first == null)
    {
        // TODO: log warning - filter graph getName() is empty.
        return FilterGraphResult.error("Illegal: empty filter graph");
    }

    // Next, append a WriteFeatures filter that will generate the output
    // feature store.
    WriteFeaturesFilter writer = new WriteFeaturesFilter();
    writer.setOutputURI(output_uri);
    //writer.setAppendMode( WriteFeaturesFilter.OVERWRITE );

    FilterState output_state = writer.newState();
    first.appendState(output_state);

    // Now run the graph, pushing each source feature through the state chain.
    FilterStateResult state_result = new FilterStateResult();
    int count = 0;
    osg.Timer_t start = osg.Timer.instance().tick();

    env.setOutputSRS(env.getInputSRS());

    FeatureFilterState state = (FeatureFilterState)first;
    while (state_result.isOK() && cursor.hasNext())
    {
        state.push(cursor.next());
        state_result = state.traverse(env);
        count++;
    }

    if (state_result.isOK())
    {
        state_result = state.signalCheckpoint();
    }

    osg.Timer_t end = osg.Timer.instance().tick();
    double dur = osg.Timer.instance().delta_s(start, end);

    if (state_result.isOK())
    {
        return FilterGraphResult.ok(output_state.getLastKnownFilterEnv());
    }
    else
    {
        return FilterGraphResult.error("Filter graph failed to compute feature store");
    }
#endif
    throw new NotImplementedException();
}
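// Usage sketch (illustrative only; the method above is still stubbed out behind
// #if TODO): assuming a Session that can create a FilterEnv, as in compile()
// earlier in this section, a caller might write filtered features to a new store
// like this. "session", "cursor", and the output URI are hypothetical placeholders.
//
//     FilterEnv env = session.createFilterEnv();
//     FilterGraphResult r = computeFeatureStore(cursor, env, "my_output_store");
//     if (!r.isOK())
//     {
//         // handle the failure reported by the graph
//     }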
/**
 * Copy constructor.
 */
public FilterGraphResult(FilterGraphResult rhs)
{
    is_ok = rhs.is_ok;
    out_env = rhs.out_env;
}
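// Usage sketch (illustrative): copying a result preserves both its status flag
// and its captured output FilterEnv; "original" is a hypothetical placeholder.
//
//     FilterGraphResult copy = new FilterGraphResult(original);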
/**
 * Runs the graph to generate a scene graph.
 *
 * Executes the graph by passing features to the first filter in the
 * chain. That filter will process the data, pass the results along to
 * the next filter, and so on until completion.
 *
 * @param cursor
 *      Source cursor for features to process
 * @param env
 *      Contextual compilation environment
 * @param output
 *      Output parameter; upon success, receives a group node whose children
 *      are the resulting nodes of the compiled scene
 * @return
 *      A structure describing the result of the compilation.
 */
public FilterGraphResult computeNodes(FeatureCursor cursor, FilterEnv env, out osg.Group output)
{
    FilterStateResult state_result = new FilterStateResult();
    output = null;

    NodeFilterState output_state = null;

    // First build a new state chain corresponding to our filter prototype chain.
    FilterState first = null;
    foreach (Filter i in filter_prototypes)
    {
        FilterState next_state = i.newState();
        if (first == null)
        {
            first = next_state;
        }
        else
        {
            first.appendState(next_state);
        }

        if (next_state is NodeFilterState)
        {
            output_state = (NodeFilterState)next_state;
        }
    }

    // Now traverse the states, pushing each source feature through the chain.
    if (first != null)
    {
        int count = 0;
        osg.Timer_t start = osg.Timer.instance().tick();

        env.setOutputSRS(env.getInputSRS());

        if (first is FeatureFilterState)
        {
            FeatureFilterState state = (FeatureFilterState)first;
            while (state_result.isOK() && cursor.hasNext())
            {
                state.push(wind(cursor.next()));
                state_result = state.traverse(env);
                count++;
            }
            if (state_result.isOK())
            {
                state_result = state.signalCheckpoint();
            }
        }
        else if (first is FragmentFilterState)
        {
            FragmentFilterState state = (FragmentFilterState)first;
            while (state_result.isOK() && cursor.hasNext())
            {
                state.push(wind(cursor.next()));
                state_result = state.traverse(env);
                count++;
            }
            if (state_result.isOK())
            {
                state_result = state.signalCheckpoint();
            }
        }
        else if (first is CollectionFilterState)
        {
            CollectionFilterState state = (CollectionFilterState)first;
            while (state_result.isOK() && cursor.hasNext())
            {
                state.push(wind(cursor.next()));
                state_result = state.traverse(env);
                count++;
            }
            if (state_result.isOK())
            {
                state_result = state.signalCheckpoint();
            }
        }

        osg.Timer_t end = osg.Timer.instance().tick();
        double dur = osg.Timer.instance().delta_s(start, end);
        //osgGIS.notify( osg.ALWAYS ) << std.endl << "Time = " << dur << " s; Per Feature Avg = " << (dur/(double)count) << " s" << std.endl;
    }
    else
    {
        state_result.set(FilterStateResult.Status.STATUS_NODATA);
    }

    // Collect the output of the node-producing filter state, if any.
    if (output_state != null && state_result.hasData())
    {
        output = new osg.Group();
        foreach (AttributedNode i in output_state.getOutput())
        {
            output.addChild(i.getNode());
        }
    }

    if (state_result.isOK())
    {
        return FilterGraphResult.ok(output_state.getLastKnownFilterEnv());
    }
    else
    {
        return FilterGraphResult.error("Filter graph failed to compute any nodes");
    }
}
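// Usage sketch (illustrative only): with the out-parameter form above, a caller
// receives the compiled group directly from computeNodes. "env" and "cursor" are
// assumed to have been prepared as in compile() earlier in this section.
//
//     osg.Group result;
//     FilterGraphResult r = computeNodes(cursor, env, out result);
//     if (r.isOK() && result != null)
//     {
//         // "result" holds one child per AttributedNode produced by the graph
//     }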