Revision 3035
Added by perry about 18 years ago

src/edu/ucsb/nceas/metacat/spatial/SpatialDataset.java

/**
 *  '$RCSfile$'
 *  Copyright: 2003 Regents of the University of California.
 *
 *  Author: Matthew Perry
 *  '$Date$'
 *  '$Revision$'
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package edu.ucsb.nceas.metacat.spatial;

import org.geotools.feature.FeatureType;
import org.geotools.data.shapefile.ShapefileDataStore;
import org.geotools.data.FeatureStore;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.Transaction;
import org.geotools.feature.Feature;
import org.geotools.filter.Filter;
import org.geotools.filter.CompareFilter;
import org.geotools.filter.FilterFactory;
import org.geotools.filter.FilterFactoryFinder;
import org.geotools.filter.IllegalFilterException;
import org.geotools.feature.FeatureCollection;
import org.geotools.feature.FeatureCollections;

import org.apache.log4j.Logger;

import java.io.File;
import java.io.IOException;

import java.net.URI;
import java.net.URL;
import java.net.MalformedURLException;

import edu.ucsb.nceas.metacat.MetaCatUtil;


public class SpatialDataset {

    SpatialFeatureSchema featureSchema = new SpatialFeatureSchema();

    //public String polygonShpUri = MetaCatUtil.getOption("certPath") + "data/metacat_shps/data_bounds.shp";
    //public String pointShpUri = MetaCatUtil.getOption("certPath")+ "data/metacat_shps/data_points.shp";

    FeatureCollection polygonCollection = FeatureCollections.newCollection();
    FeatureCollection pointCollection = FeatureCollections.newCollection();

    private static Logger log =
        Logger.getLogger(SpatialDataset.class.getName());

    /** empty constructor **/
    public SpatialDataset() {

    }

    /*
     * Adds a new feature (from a SpatialDocument).
     * This is faster than insertOrUpdate, but relying on this method
     * might cause duplication of docids in the spatial cache.
     * Therefore, it's really only useful when regenerating the entire cache.
     */
    public void add( String geomType, Feature feature ) {
        if( geomType.equals("polygon") ) {
            // Check if feature actually is a multipolygon
            if( feature != null)
                polygonCollection.add( feature );

        } else if( geomType.equals("point") ) {
            // Check if feature actually is a multipoint
            if( feature != null)
                pointCollection.add( feature );
        }
    }

    /*
     * Deletes the given docid from the spatial cache.
     */
    public void delete( String geomType, String docid ) throws IOException {

        FilterFactory filterFactory = FilterFactoryFinder.createFilterFactory();
        CompareFilter filter = null;
        FeatureStore fStore = null;
        ShapefileDataStore dStore = null;

        try {
            // Create the filter
            filter = filterFactory.createCompareFilter(CompareFilter.COMPARE_EQUALS);
            filter.addLeftValue(filterFactory.createAttributeExpression("docid"));
            filter.addRightValue(filterFactory.createLiteralExpression(docid));
        } catch (IllegalFilterException e) {
            e.printStackTrace();
        }

        // Begin new transaction
        Transaction t = new DefaultTransaction("handle");
        t.putProperty( "updating spatial cache", new Integer(7) );
        String lockId = "SpatialDataset.delete";

        try {

            boolean validGeomType = false;

            if( geomType.equals("polygon") ) {
                dStore = new ShapefileDataStore( (new File( featureSchema.polygonShpUri )).toURL() );
                fStore = (FeatureStore) dStore.getFeatureSource(dStore.getTypeNames()[0]);
                validGeomType = true;
            } else if( geomType.equals("point") ) {
                dStore = new ShapefileDataStore( (new File( featureSchema.pointShpUri )).toURL() );
                fStore = (FeatureStore) dStore.getFeatureSource(dStore.getTypeNames()[0]);
                validGeomType = true;
            }

            // Initiate the transaction
            fStore.setTransaction( t );
            t.addAuthorization( lockId );

            if( validGeomType == true) {
                // Remove old feature
                fStore.removeFeatures( filter );

                // Commit changes to shapefile
                t.commit();
                log.info(" Delete docid " + docid + " from spatial cache");
            }

        } catch (MalformedURLException e) {
            e.printStackTrace();
            t.rollback(); // cancel operations
        } catch (IOException e) {
            e.printStackTrace();
            t.rollback(); // cancel operations
        } finally {
            // Close out the transaction
            t.close();
        }

    }

    /*
     * Either inserts or updates the spatial cache with the new
     * spatial document, depending on whether it currently exists.
     * The docid is also passed in for quicker searching.
     */
    public void insertOrUpdate( String geomType, Feature feature, String docid ) throws IOException {

        FeatureCollection fColl = FeatureCollections.newCollection();
        FilterFactory filterFactory = FilterFactoryFinder.createFilterFactory();
        CompareFilter filter = null;
        FeatureStore fStore = null;
        ShapefileDataStore dStore = null;

        // Explain why geotools fails to create the projection info from the shapefile
        log.info( " The '.prj' errors below are related to a geotools bug " +
                  " (http://jira.codehaus.org/browse/GEOT-604) and can be ignored");

        try {
            // Create the filter
            filter = filterFactory.createCompareFilter(CompareFilter.COMPARE_EQUALS);
            filter.addLeftValue(filterFactory.createAttributeExpression("docid"));
            filter.addRightValue(filterFactory.createLiteralExpression(docid));
        } catch (IllegalFilterException e) {
            e.printStackTrace();
        }

        // Begin new transaction
        Transaction t = new DefaultTransaction("handle");
        t.putProperty( "updating spatial cache", new Integer(7) );
        String lockId = "SpatialDataset.insertOrUpdate";

        try {

            boolean validGeomType = false;

            if( geomType.equals("polygon") ) {
                dStore = new ShapefileDataStore( (new File( featureSchema.polygonShpUri )).toURL() );
                fStore = (FeatureStore) dStore.getFeatureSource(dStore.getTypeNames()[0]);
                validGeomType = true;
            } else if( geomType.equals("point") ) {
                dStore = new ShapefileDataStore( (new File( featureSchema.pointShpUri )).toURL() );
                fStore = (FeatureStore) dStore.getFeatureSource(dStore.getTypeNames()[0]);
                validGeomType = true;
            }

            // Initiate the transaction
            fStore.setTransaction( t );
            t.addAuthorization( lockId );

            if( feature != null && validGeomType == true) {
                // Remove old feature
                fStore.removeFeatures( filter );

                // Create a new feature collection, then add it to the feature store
                fColl.add( feature );
                fStore.addFeatures(fColl);

                // Commit changes to shapefile
                t.commit();
                log.info(" Insert or Update docid " + docid + " in spatial cache");
            }

        } catch (MalformedURLException e) {
            e.printStackTrace();
            t.rollback(); // cancel operations
        } catch (IOException e) {
            e.printStackTrace();
            t.rollback(); // cancel operations
        } finally {
            // Close out the transaction
            t.close();
        }

    }

    /*
     * Saves the SpatialDataset object to the spatial cache
     */
    public void save() {
        // Save Polygons
        try {
            URL anURL = (new File( featureSchema.polygonShpUri )).toURL();
            ShapefileDataStore polygonDatastore = new ShapefileDataStore(anURL);
            FeatureType polygonType = featureSchema.getPolygonFeatureType();
            polygonDatastore.createSchema( polygonType );
            FeatureStore polygonFeatureStore = (FeatureStore) polygonDatastore.getFeatureSource( polygonType.getTypeName() );
            polygonFeatureStore.addFeatures( polygonCollection );
            log.info(" ---- Polygons saved to " + featureSchema.polygonShpUri );
        } catch(java.net.MalformedURLException e) {
            log.error("Malformed URL Exception : "+e);
        } catch(java.io.IOException e) {
            log.error("IO Exception : "+e);
        }

        // Save Points
        try {
            URL anURL = (new File( featureSchema.pointShpUri )).toURL();
            ShapefileDataStore pointDatastore = new ShapefileDataStore(anURL);
            FeatureType pointsType = featureSchema.getPointFeatureType();
            pointDatastore.createSchema( pointsType );
            FeatureStore pointFeatureStore = (FeatureStore) pointDatastore.getFeatureSource( pointsType.getTypeName() );
            pointFeatureStore.addFeatures( pointCollection );
            log.info(" ---- Points saved to " + featureSchema.pointShpUri );
        } catch(java.net.MalformedURLException e) {
            log.error("Malformed URL Exception : "+e);
        } catch(java.io.IOException e) {
            log.error("IO Exception : "+e);
        }
    }

}

src/edu/ucsb/nceas/metacat/spatial/SldFactory.java

/*
 * Styled Layer Descriptor Factory
 * Author: Matt Perry
 * Status: testing
 * MPTODO: Use a spatial access constraints class to generate the appropriate SLD filter
 */
package edu.ucsb.nceas.metacat.spatial;

import edu.ucsb.nceas.utilities.XMLUtilities;

import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpUtils;
import javax.servlet.ServletOutputStream;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.FactoryConfigurationError;
import javax.xml.parsers.ParserConfigurationException;

import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;

import java.io.File;
import java.io.IOException;

import org.w3c.dom.*;

public class SldFactory extends HttpServlet {

    static Document document;
    static String sld;

    /** Handle "GET" method requests from HTTP clients */
    public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException
    {
        handleGetOrPost(request, response);
    }

    /** Handle "POST" method requests from HTTP clients */
    public void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException
    {
        handleGetOrPost(request, response);
    }

    /**
     * Control servlet response depending on the action parameter specified
     */
    private void handleGetOrPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException
    {
        //String dataset = request.getParameter("dataset");
        // MPTODO : Eventually use dataset to determine filename
        String filename = "/var/lib/tomcat5/webapps/knb/style/skins/ebm/spatial/data_bounds_style.sld";
        String sld = getSld(filename);

        response.setContentType("text/xml");
        response.getWriter().write(sld);

        System.out.println("\n*** SldFactory request handled ***\n");
    }

    /**
     * Given a filename of an existing SLD document,
     * reads the SLD and adds an ogc:Filter to exclude/include
     * certain docids based on the user's permissions.
     *
     * Returns the SLD document as a String.
     *
     * @param filename Filename of the base SLD.
     *
     */
    private String getSld(String filename)
    {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();

        try {
            DocumentBuilder builder = factory.newDocumentBuilder();
            document = builder.parse( new File(filename) );
        } catch (SAXException sxe) {
            sxe.printStackTrace();
        } catch (ParserConfigurationException pce) {
            pce.printStackTrace();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }

        Element root = document.getDocumentElement();
        NodeList elemList = document.getElementsByTagName("Rule");
        Node ruleNode = elemList.item(0);

        // Get the list of docids due to access constraints

        // Get the list of docids due to skin filtering of org names

        // Get the list of docids due to existing query (eg taxonomic)

        // Get the Element/node comprising the ogc:Filter for all of the above
        Element filterElement = getFilterElement(document);

        // Append the Filter to the ruleNode
        ruleNode.appendChild(filterElement);

        // Write new DOM as a string
        String rulesString = XMLUtilities.getDOMTreeAsString((Node) root, true);

        return rulesString;
    }

    /**
     * Given a DOM document and a vector of docids,
     * creates an OGC Filter node.
     *
     * @param inDoc DOM document.
     *
     */
    private Element getFilterElement(Document inDoc)
    {
        Element filterElem = inDoc.createElement("ogc:Filter");

        // MPTODO : BEGIN LOOP thru vector of docids
        Element opElem = inDoc.createElement("ogc:PropertyIsEqualTo");

        Element propertyElem = inDoc.createElement("ogc:PropertyName");
        Text propertyText = inDoc.createTextNode("docid");
        propertyElem.appendChild(propertyText);

        Element literalElem = inDoc.createElement("ogc:Literal");
        Text literalText = inDoc.createTextNode("perry.1"); // i
        literalElem.appendChild(literalText);

        opElem.appendChild(propertyElem);
        opElem.appendChild(literalElem);
        filterElem.appendChild(opElem);
        // END LOOP

        return filterElem;
    }

}

src/edu/ucsb/nceas/metacat/spatial/WmsFilter.java

/* WMS Filter
 * Author: MP
 * Status: Just a test
 * MPTODO: make this a true servlet filter to append sld to any incoming wms request
 */
package edu.ucsb.nceas.metacat.spatial;

import javax.servlet.*;
import javax.servlet.http.*;
import java.io.IOException;
import java.util.Enumeration;

public final class WmsFilter implements Filter {
    private FilterConfig filterConfig = null;

    public void init(FilterConfig filterConfig)
        throws ServletException {
        this.filterConfig = filterConfig;
    }

    public void destroy() {
        this.filterConfig = null;
    }

    public void doFilter(ServletRequest request,
                         ServletResponse response, FilterChain chain)
        throws IOException, ServletException {

        if (filterConfig == null)
            return;

        System.out.println("\n===============");

        System.out.println(" The filter Works !!!");
        long before = System.currentTimeMillis();

        // Attributes != Parameters, but there is no setParameter
        //request.setAttribute("SLD", "http://pmark.msi.ucsb.edu:8180/knb/style/skins/ebm/spatial/data_bounds_style.sld");

        Enumeration e = request.getParameterNames();
        while( e.hasMoreElements() ) {
            String name = (String) e.nextElement();
            System.out.println( name + " = " + request.getParameter(name));
        }

        chain.doFilter(request, response);
        long after = System.currentTimeMillis();
        System.out.println(" *** Time " + (after - before) + "ms");

        System.out.println("===============\n");

        // A simple redirect won't work since it will filter itself endlessly
        //HttpServletResponse hres = (HttpServletResponse) response;
        //HttpServletRequest hreq = (HttpServletRequest) request;
        //hres.sendRedirect( hreq.getRequestURL().toString() );
    }
}

src/edu/ucsb/nceas/metacat/spatial/SpatialHarvester.java

/**
 *  '$RCSfile$'
 *  Copyright: 2000 Regents of the University of California and the
 *              National Center for Ecological Analysis and Synthesis
 *
 *  Author: Matthew Perry
 *  '$Date$'
 *  '$Revision$'
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

package edu.ucsb.nceas.metacat.spatial;

import java.sql.ResultSet;
import java.sql.PreparedStatement;
import java.util.Vector;
import java.io.IOException;

import org.apache.log4j.Logger;

import edu.ucsb.nceas.metacat.MetaCatUtil;
import edu.ucsb.nceas.metacat.DBConnection;
import org.geotools.feature.Feature;

/*
 * Harvests spatial data from metacat and saves it to the persistent cache
 */
public class SpatialHarvester {

    private static Logger log = Logger.getLogger(SpatialHarvester.class.getName());

    private DBConnection dbconn;

    /** constructor to initialize db connection **/
    public SpatialHarvester() {
        try {
            dbconn = new DBConnection();
        } catch( Exception e ) {
            log.error("Error getting a database connection in the SpatialHarvester constructor");
            e.printStackTrace();
        }
    }

    /*
     * Closes the database connection.
     * MUST be called after you're done with the SpatialHarvester.
     */
    public void destroy() {
        try {
            dbconn.close();
        } catch( Exception e ) {
            log.error("Error closing out dbconn");
            e.printStackTrace();
        }
    }

    /**
     * Returns a Vector of all the docids in the xml_path_index table
     */
    protected Vector queryAllDocids() {
        Vector _docs = new Vector();
        PreparedStatement pstmt = null;
        ResultSet rs = null;

        String query = "select distinct docid from xml_path_index";

        try {
            pstmt = dbconn.prepareStatement(query);
            pstmt.execute();
            rs = pstmt.getResultSet();
            while (rs.next()) {
                String docid = rs.getString(1);
                //log.fatal("adding docid: " + docid);
                _docs.add(docid);
            }
            rs.close();
            pstmt.close();
        }
        catch(Exception e) {
            log.error("Error getting docids from queryAllDocids");
            e.printStackTrace();
        }
        return _docs;
    }

    /*
     * Currently just a wrapper around the harvester.
     * Eventually we can use this method as a
     * timed queue like the indexing process.
     */
    public void addToUpdateQue( String docid ) {
        harvestDocument(docid);
    }

    /*
     * Delete from the spatial cache.
     * Just a wrapper around delete for now.
     */
    public void addToDeleteQue( String docid ) {
        deleteDocument(docid);
    }

    /*
     * Given a docid, will attempt to delete it
     * from the spatial cache
     */
    public void deleteDocument( String docid ) {

        // Read the existing spatial dataset cache
        SpatialDataset sds = new SpatialDataset();

        try {
            // Delete both the polygon(s) and point(s)
            sds.delete( "polygon" , docid );
            sds.delete( "point" , docid );
        } catch (IOException e) {
            log.error("IOException while deleting from spatial cache");
        }

        log.info(" --------- Spatial Harvester - Deleted from spatial cache : " + docid );
    }


    /*
     * Given a docid, will update the spatial cache accordingly
     */
    public void harvestDocument( String docid ) {

        // Read the existing spatial dataset cache
        SpatialDataset sds = new SpatialDataset();

        // insert OR update the spatial cache
        // SpatialDataset.insertOrUpdate takes care of the difference
        SpatialDocument sdoc = new SpatialDocument( docid, dbconn );

        try {
            Feature polygonFeature = sdoc.getPolygonFeature();
            sds.insertOrUpdate("polygon", polygonFeature, docid );

            Feature pointFeature = sdoc.getPointFeature();
            sds.insertOrUpdate("point", pointFeature, docid );
            log.info(" --------- Spatial Harvester - spatial cache updated for : " + docid );
        } catch (IOException e) {
            log.error("IOException while performing spatial harvest ");
        }
    }

    /*
     * Completely regenerates the spatial cache
     */
    public void regenerate() {

        // Create new Spatial Dataset
        SpatialDataset sds = new SpatialDataset();

        // Get list of all docids in the database
        Vector docids = queryAllDocids();

        for (int i = 0; i < docids.size(); i++) {
            SpatialDocument sdoc = new SpatialDocument( (String)docids.elementAt(i), dbconn );

            // Get the polygon representation of the SpatialDocument
            // and add it to the spatial dataset
            Feature polygonFeature = sdoc.getPolygonFeature();
            sds.add("polygon", polygonFeature );

            // Get the point representation of the SpatialDocument
            // and add it to the spatial dataset
            Feature pointFeature = sdoc.getPointFeature();
            sds.add("point", pointFeature );

            log.info(" ****** Spatial harvest of docid " + docids.elementAt(i) );
        }

        // save SpatialDataset
        sds.save();

    }

}

src/edu/ucsb/nceas/metacat/spatial/SpatialFeatureSchema.java

/**
 *  '$RCSfile$'
 *  Copyright: 2003 Regents of the University of California.
 *
 *  Author: Matthew Perry
 *  '$Date$'
 *  '$Revision$'
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package edu.ucsb.nceas.metacat.spatial;

import edu.ucsb.nceas.metacat.MetaCatUtil;

import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.MultiPoint;
import org.geotools.feature.AttributeType;
import org.geotools.feature.AttributeTypeFactory;
import org.geotools.feature.FeatureType;
import org.geotools.feature.FeatureTypeFactory;
import org.geotools.feature.SchemaException;

import org.apache.log4j.Logger;


public class SpatialFeatureSchema {

    private static Logger log =
        Logger.getLogger(SpatialFeatureSchema.class.getName());

    public static String polygonShpUri = MetaCatUtil.getOption("certPath") + "data/metacat_shps/data_bounds.shp";
    public static String pointShpUri = MetaCatUtil.getOption("certPath")+ "data/metacat_shps/data_points.shp";

    // EPSG for latlong coordinate system w/ WGS84 datum
    public static int srid = 4326;

    /** empty constructor **/
    public SpatialFeatureSchema() {

    }

    /*
     * Creates the featuretype schema for polygon bounds
     */
    public static FeatureType getPolygonFeatureType() {
        try {
            AttributeType[] types = new AttributeType[4];
            types[0] = AttributeTypeFactory.newAttributeType("the_geom", com.vividsolutions.jts.geom.MultiPolygon.class);
            types[1] = AttributeTypeFactory.newAttributeType("docid", String.class);
            types[2] = AttributeTypeFactory.newAttributeType("url", String.class);
            types[3] = AttributeTypeFactory.newAttributeType("title", String.class);
            FeatureType boundsType = FeatureTypeFactory.newFeatureType(types, "bounds");
            return boundsType;
        } catch(SchemaException e) {
            log.error("schema exception : "+e);
            return null;
        }
    }

    /*
     * Creates the featuretype schema for point centroids
     */
    public static FeatureType getPointFeatureType() {
        try {
            AttributeType[] types = new AttributeType[4];
            types[0] = AttributeTypeFactory.newAttributeType("the_geom", com.vividsolutions.jts.geom.MultiPoint.class);
            types[1] = AttributeTypeFactory.newAttributeType("docid", String.class);
            types[2] = AttributeTypeFactory.newAttributeType("url", String.class);
            types[3] = AttributeTypeFactory.newAttributeType("title", String.class);
            FeatureType centroidsType = FeatureTypeFactory.newFeatureType(types, "centroids");
            return centroidsType;
        } catch(SchemaException e) {
            log.error("schema exception : "+e);
            return null;
        }
    }

}

src/edu/ucsb/nceas/metacat/spatial/SpatialDocument.java

/**
 *  '$RCSfile$'
 *  Copyright: 2003 Regents of the University of California.
 *
 *  Author: Matthew Perry
 *  '$Date$'
 *  '$Revision$'
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package edu.ucsb.nceas.metacat.spatial;

import java.io.File;

import edu.ucsb.nceas.metacat.DBConnection;
import edu.ucsb.nceas.metacat.MetaCatUtil;

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.MultiPoint;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.PrecisionModel;

import org.geotools.feature.AttributeType;
import org.geotools.feature.AttributeTypeFactory;
import org.geotools.feature.Feature;
import org.geotools.feature.FeatureType;
import org.geotools.feature.FeatureTypeFactory;
import org.geotools.feature.SchemaException;

import java.sql.ResultSet;
import java.sql.PreparedStatement;
import java.util.Vector;

import org.apache.log4j.Logger;

public class SpatialDocument {

    private DBConnection dbconn;

    private static Logger log =
        Logger.getLogger(SpatialDocument.class.getName());

    private SpatialFeatureSchema featureSchema = new SpatialFeatureSchema();

    Vector west = new Vector();
    Vector south = new Vector();
    Vector east = new Vector();
    Vector north = new Vector();

    String title = null;
    String docid = null;

    /** constructor that queries the db **/
    public SpatialDocument( String docid , DBConnection dbconn ) {

        this.docid = docid;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        this.dbconn = dbconn;

        /*
         * Get the bounding coordinates
         */
        String query = "SELECT path, nodedatanumerical, parentnodeid FROM xml_path_index"
                     + " WHERE docid = '" + docid.trim() + "'"
                     + " AND (path = '" + MetaCatUtil.getOption("westBoundingCoordinatePath") + "'"
                     + " OR path = '" + MetaCatUtil.getOption("southBoundingCoordinatePath") + "'"
                     + " OR path = '" + MetaCatUtil.getOption("eastBoundingCoordinatePath") + "'"
                     + " OR path = '" + MetaCatUtil.getOption("northBoundingCoordinatePath") + "'"
                     + " ) ORDER BY parentnodeid;";

        try {
            pstmt = dbconn.prepareStatement(query);
            pstmt.execute();
            rs = pstmt.getResultSet();
            while (rs.next()) {
                if ( rs.getString(1).equals( MetaCatUtil.getOption("westBoundingCoordinatePath") ) )
                    this.west.add( rs.getFloat(2) );
                else if ( rs.getString(1).equals( MetaCatUtil.getOption("southBoundingCoordinatePath") ) )
                    this.south.add( rs.getFloat(2) );
                else if ( rs.getString(1).equals( MetaCatUtil.getOption("eastBoundingCoordinatePath") ) )
                    this.east.add( rs.getFloat(2) );
                else if ( rs.getString(1).equals( MetaCatUtil.getOption("northBoundingCoordinatePath") ) )
                    this.north.add( rs.getFloat(2) );
                else
                    log.error("** An xml path not related to your bounding coordinates was returned by this query \n" + query + "\n");
            }
            rs.close();
            pstmt.close();
        }
        catch(Exception e) {
            log.error(" ---- Error getting bounding coordinates for " + docid);
            e.printStackTrace();
        }

        // Get the Title
        query = "select docid, nodedata, nodeid "
              + "from xml_nodes "
              + "where parentnodeid = "
              + " ( select nodeid "
              + " from xml_nodes "
              + " where docid like '" + docid.trim() +"' "
              + " and nodename like 'title%' "
              + " and parentnodeid = "
              + " ( select nodeid "
              + " from xml_nodes "
              + " where docid like '" + docid.trim() + "' "
              + " and nodename = 'dataset' "
              + " limit 1) "
              + " limit 1)" ;

        try {
            pstmt = dbconn.prepareStatement(query);
            pstmt.execute();
            rs = pstmt.getResultSet();
            if (rs.next())
                this.title = rs.getString(2);
            rs.close();
            pstmt.close();
        }
        catch(Exception e) {
            log.error(" **** Error getting docids from getTitle for docid = "+docid);
            e.printStackTrace();
            log.error(" query ============== \n " + query);
            this.title = docid;
        }

        /* try {
               dbconn.close();
           } catch( java.sql.SQLException e ) {
               log.error("java.sql.SQLException");
           }
        */

    }

    /*
     * Returns a jts (multi)polygon feature with geometry plus attributes,
     * ready to be inserted into our spatial dataset cache
     */
    public Feature getPolygonFeature() {
        // Get polygon feature type
        FeatureType polyType = featureSchema.getPolygonFeatureType();

        MultiPolygon theGeom = getPolygonGeometry();
        if (theGeom == null)
            return null;

        // Populate the feature schema
        try {
            Feature polyFeature = polyType.create(new Object[]{
                theGeom,
                this.docid,
                getUrl(this.docid),
                this.title });
            return polyFeature;
        } catch (org.geotools.feature.IllegalAttributeException e) {
            log.error("!!!!!!! org.geotools.feature.IllegalAttributeException");
            return null;
        }
    }

    /*
     * Returns a jts (multi)point feature with geometry plus attributes,
     * ready to be inserted into our spatial dataset cache
     */
    public Feature getPointFeature() {
        // Get point feature type
        FeatureType pointType = featureSchema.getPointFeatureType();

        MultiPoint theGeom = getPointGeometry();
        if (theGeom == null)
            return null;

        // Populate the feature schema
        try {
            Feature pointFeature = pointType.create(new Object[]{
                theGeom,
                this.docid,
                getUrl(this.docid),
                this.title });
            return pointFeature;
        } catch (org.geotools.feature.IllegalAttributeException e) {
            log.error("!!!!!!! org.geotools.feature.IllegalAttributeException");
            return null;
        }
    }

    /*
     * Given a valid docid, returns an appropriate URL
     * for viewing the metadata document
     */
    private String getUrl( String docid ) {
        String docUrl = MetaCatUtil.getOption("metacatUrl")
                      + "?action=read"
                      + "&docid=" + docid
                      + "&qformat=" + MetaCatUtil.getOption("default-style");

        return docUrl;
    }

    /**
     * Returns the title of the docid
     */
    private String getTitle(String docid) {
        String title = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        String query = "select docid, nodedata, nodeid "
              + "from xml_nodes "
              + "where parentnodeid = "
              + " ( select nodeid "
              + " from xml_nodes "
              + " where docid like '" + docid.trim() +"' "
              + " and nodename like 'title%' "
              + " and parentnodeid = "
              + " ( select nodeid "
              + " from xml_nodes "
              + " where docid like '" + docid.trim() + "' "
              + " and nodename = 'dataset' "
              + " limit 1) "
              + " limit 1)" ;

        /*
         * String query = "select docid, nodedata, nodeid from xml_nodes where "
         *     + "nodeid =(select nodeid from xml_nodes where docid like '"
         *     + docid.trim() + "' and nodename like 'title%');";
         */

        try {
            dbconn = new DBConnection();
            pstmt = dbconn.prepareStatement(query);
            pstmt.execute();
            rs = pstmt.getResultSet();
            if (rs.next())
                title = rs.getString(2);
            rs.close();
            pstmt.close();
            dbconn.close();
        }
        catch(Exception e) {
            log.error(" **** Error getting docids from getTitle for docid = "+docid);
            e.printStackTrace();
            log.error(" query ============== \n " + query);
            title = docid;
        }

        return title;
    }


    /*
     * Returns polygon geometry
     */
    private MultiPolygon getPolygonGeometry() {

        PrecisionModel precModel = new PrecisionModel(); // default: Floating point
        GeometryFactory geomFac = new GeometryFactory( precModel, featureSchema.srid );

        Vector polygons = new Vector();

        if ( west.size() == south.size() && south.size() == east.size() && east.size() == north.size() ) {
            for (int i = 0; i < west.size(); i++) {

                Coordinate[] linestringCoordinates = new Coordinate[5];

                // Check if it's actually a valid polygon
                if ( (Float)west.elementAt(i) == 0.0 &&
                     (Float)east.elementAt(i) == 0.0 &&
                     (Float)north.elementAt(i) == 0.0 &&
                     (Float)south.elementAt(i) == 0.0) {

                    log.warn(" Invalid or empty coordinates ... skipping");
                    continue;
                } else if( ((Float)west.elementAt(i)).compareTo( (Float)east.elementAt(i) ) == 0 &&
                           ((Float)north.elementAt(i)).compareTo( (Float)south.elementAt(i) ) == 0 ) {

                    log.warn(" Point coordinates only.. skipping polygon generation");
                    continue;
                }

                linestringCoordinates[0] = new Coordinate( (Float)west.elementAt(i), (Float)south.elementAt(i) );
                linestringCoordinates[1] = new Coordinate( (Float)west.elementAt(i), (Float)north.elementAt(i));
                linestringCoordinates[2] = new Coordinate( (Float)east.elementAt(i), (Float)north.elementAt(i));
                linestringCoordinates[3] = new Coordinate( (Float)east.elementAt(i), (Float)south.elementAt(i));
                linestringCoordinates[4] = new Coordinate( (Float)west.elementAt(i), (Float)south.elementAt(i) );
                polygons.add( geomFac.createPolygon( geomFac.createLinearRing(linestringCoordinates), null) );
            }
        } else {
            log.error(" *** Something went wrong.. your east, west, north and south bounding arrays are different sizes!");
        }

        if( polygons.size() > 0 ) {
            Polygon[] polyArray = geomFac.toPolygonArray( polygons );
            MultiPolygon multiPolyGeom = geomFac.createMultiPolygon( polyArray );
            return multiPolyGeom;
        } else {
            return null;
        }

    }

    /*
     * Returns a point geometry
     */
    private MultiPoint getPointGeometry() {

        PrecisionModel precModel = new PrecisionModel(); // default: Floating point
        GeometryFactory geomFac = new GeometryFactory( precModel, featureSchema.srid );

        PreparedStatement pstmt = null;
        ResultSet rs = null;

        Vector points = new Vector();

        if ( west.size() == south.size() && south.size() == east.size() && east.size() == north.size() ) {
            for (int i = 0; i < west.size(); i++) {

                // Check if it's actually a valid point
                if ( (Float)west.elementAt(i) == (float)0.0 &&
                     (Float)east.elementAt(i) == (float)0.0 &&
                     (Float)north.elementAt(i) == (float)0.0 &&
                     (Float)south.elementAt(i) == (float)0.0 ) {

                    log.warn(" Invalid or empty coordinates ... skipping");
                    continue;
                }

                Double xCenter = ( (Float)west.elementAt(i) + (Float)east.elementAt(i) ) / (Double) 2.0;
                Double yCenter = ( (Float)south.elementAt(i) + (Float)north.elementAt(i) ) / (Double) 2.0;
                points.add( geomFac.createPoint( new Coordinate( xCenter, yCenter)) );
            }
        } else {
            log.error(" *** Something went wrong.. your east, west, north and south bounding vectors are different sizes!");
        }

        if( points.size() > 0 ) {
            Point[] pointArray = geomFac.toPointArray( points );
            MultiPoint multiPointGeom = geomFac.createMultiPoint( pointArray );
            return multiPointGeom;
        } else {
            return null;
        }

    }
}
refactory java classes for spatial harvester
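
Taken together, the classes above split the refactored harvest into three roles: SpatialDocument reads a document's bounding coordinates and title out of the Metacat database, SpatialDataset writes the resulting features into the shapefile cache described by SpatialFeatureSchema, and SpatialHarvester drives the process. A minimal usage sketch, based only on the public methods listed above and assuming a configured Metacat instance so that DBConnection and MetaCatUtil.getOption(...) resolve (the class name SpatialCacheExample and the docid "nceas.100.1" are placeholders):

import edu.ucsb.nceas.metacat.spatial.SpatialHarvester;

public class SpatialCacheExample {
    public static void main(String[] args) {
        // Placeholder docid; any docid present in xml_path_index would do.
        String docid = "nceas.100.1";

        SpatialHarvester harvester = new SpatialHarvester();
        try {
            // Rebuild the entire shapefile cache from xml_path_index
            harvester.regenerate();

            // Incrementally update or remove a single document
            harvester.addToUpdateQue(docid);
            harvester.addToDeleteQue(docid);
        } finally {
            // Must be called to release the database connection
            harvester.destroy();
        }
    }
}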