Revision 3246
Added by berkley over 17 years ago
src/edu/ucsb/nceas/metacat/MetacatResultSet.java | ||
---|---|---|
1 |
/** |
|
2 |
* '$RCSfile$' |
|
3 |
* Copyright: 2007 Regents of the University of California and the |
|
4 |
* National Center for Ecological Analysis and Synthesis |
|
5 |
* Authors: Chad Berkley |
|
6 |
* |
|
7 |
* '$Author$' |
|
8 |
* '$Date$' |
|
9 |
* '$Revision$' |
|
10 |
* |
|
11 |
* This program is free software; you can redistribute it and/or modify |
|
12 |
* it under the terms of the GNU General Public License as published by |
|
13 |
* the Free Software Foundation; either version 2 of the License, or |
|
14 |
* (at your option) any later version. |
|
15 |
* |
|
16 |
* This program is distributed in the hope that it will be useful, |
|
17 |
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
18 |
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
19 |
* GNU General Public License for more details. |
|
20 |
* |
|
21 |
* You should have received a copy of the GNU General Public License |
|
22 |
* along with this program; if not, write to the Free Software |
|
23 |
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
|
24 |
*/ |
|
25 |
|
|
26 |
package edu.ucsb.nceas.metacat; |
|
27 |
|
|
28 |
import java.io.*; |
|
29 |
import java.util.*; |
|
30 |
|
|
31 |
import org.w3c.dom.*; |
|
32 |
import org.apache.xpath.*; |
|
33 |
|
|
34 |
import org.apache.log4j.Logger; |
|
35 |
|
|
36 |
import org.apache.log4j.Logger; |
|
37 |
import org.xml.sax.Attributes; |
|
38 |
import org.xml.sax.SAXException; |
|
39 |
import org.xml.sax.SAXParseException; |
|
40 |
import org.xml.sax.ext.DeclHandler; |
|
41 |
import org.xml.sax.ext.LexicalHandler; |
|
42 |
import org.xml.sax.helpers.DefaultHandler; |
|
43 |
import org.xml.sax.XMLReader; |
|
44 |
import org.xml.sax.helpers.XMLReaderFactory; |
|
45 |
import org.xml.sax.ContentHandler; |
|
46 |
import org.xml.sax.ErrorHandler; |
|
47 |
import org.xml.sax.InputSource; |
|
48 |
|
|
49 |
/** |
|
50 |
* this class implements a metacat resultset and can be serialized to xml |
|
51 |
* for printing to the servlet output stream. |
|
52 |
*/ |
|
53 |
public class MetacatResultSet extends DefaultHandler |
|
54 |
{ |
|
55 |
private Logger log = Logger.getLogger(MetacatResultSet.class); |
|
56 |
private Vector results = new Vector(); |
|
57 |
private Result currentResult = null; |
|
58 |
private String currentElementName = ""; |
|
59 |
Attributes atts = null; |
|
60 |
|
|
61 |
/** |
|
62 |
* constructor that can process a dom. WARNING: this is very slow. |
|
63 |
*/ |
|
64 |
public MetacatResultSet(Document d) |
|
65 |
throws Exception |
|
66 |
{ |
|
67 |
log.warn("#####################processing resultset..."); |
|
68 |
NodeList nl = XPathAPI.selectNodeList(d, "//document"); |
|
69 |
for(int i=0; i<nl.getLength(); i++) |
|
70 |
{ //get each of the document nodes |
|
71 |
Node docNode = nl.item(i); |
|
72 |
Node docidNode = XPathAPI.selectSingleNode(docNode, "./docid"); |
|
73 |
log.warn("processing " + docidNode.getFirstChild().getNodeValue()); |
|
74 |
Node docnameNode = XPathAPI.selectSingleNode(docNode, "./docname"); |
|
75 |
Node doctypeNode = XPathAPI.selectSingleNode(docNode, "./doctype"); |
|
76 |
Node createdateNode = XPathAPI.selectSingleNode(docNode, "./createdate"); |
|
77 |
Node updatedateNode = XPathAPI.selectSingleNode(docNode, "./updatedate"); |
|
78 |
//process the returnfields |
|
79 |
NodeList returnfieldNL = XPathAPI.selectNodeList(docNode, "./param"); |
|
80 |
Hashtable returnfieldHash = new Hashtable(); |
|
81 |
for(int j=0; j<returnfieldNL.getLength(); j++) |
|
82 |
{ |
|
83 |
Node returnfieldNode = returnfieldNL.item(j); |
|
84 |
Node nameNode = XPathAPI.selectSingleNode(returnfieldNode, "@name"); |
|
85 |
String value = returnfieldNode.getFirstChild().getNodeValue(); |
|
86 |
String name = nameNode.getNodeValue(); |
|
87 |
returnfieldHash.put(name, value); |
|
88 |
} |
|
89 |
|
|
90 |
Result r = new Result(docidNode.getFirstChild().getNodeValue(), |
|
91 |
docnameNode.getFirstChild().getNodeValue(), |
|
92 |
doctypeNode.getFirstChild().getNodeValue(), |
|
93 |
createdateNode.getFirstChild().getNodeValue(), |
|
94 |
updatedateNode.getFirstChild().getNodeValue(), |
|
95 |
returnfieldHash); |
|
96 |
addResult(r); |
|
97 |
} |
|
98 |
} |
|
99 |
|
|
100 |
/** |
|
101 |
* process a resultset using SAX |
|
102 |
*/ |
|
103 |
public MetacatResultSet(String xmlDoc) |
|
104 |
throws SAXException, IOException |
|
105 |
{ |
|
106 |
double mrsStart = System.currentTimeMillis() / 1000; |
|
107 |
log.warn("###############parsing for paged results....."); |
|
108 |
XMLReader parser = null; |
|
109 |
String parserName = MetaCatUtil.getOption("saxparser"); |
|
110 |
parser = XMLReaderFactory.createXMLReader(parserName); |
|
111 |
parser.setContentHandler(this); |
|
112 |
parser.setErrorHandler(this); |
|
113 |
parser.parse(new InputSource(new StringReader(xmlDoc))); |
|
114 |
double mrsStop = System.currentTimeMillis() / 1000; |
|
115 |
log.warn("##############done parsing for paged results. total time: " + |
|
116 |
(mrsStop - mrsStart) + " seconds"); |
|
117 |
|
|
118 |
} |
|
119 |
|
|
120 |
/** |
|
121 |
* process the beginning of an element |
|
122 |
*/ |
|
123 |
public void startElement(String uri, String localName, String qName, |
|
124 |
Attributes atts) throws SAXException |
|
125 |
{ |
|
126 |
//get the attributes |
|
127 |
this.atts = atts; |
|
128 |
if(localName.equals("document")) |
|
129 |
{ |
|
130 |
currentResult = new Result(); |
|
131 |
} |
|
132 |
else if(localName.equals("docid") || |
|
133 |
localName.equals("docname") || |
|
134 |
localName.equals("doctype") || |
|
135 |
localName.equals("createdate") || |
|
136 |
localName.equals("updatedate") || |
|
137 |
localName.equals("param")) |
|
138 |
{ //set the current element name |
|
139 |
currentElementName = localName; |
|
140 |
} |
|
141 |
} |
|
142 |
|
|
143 |
/** |
|
144 |
* process the end of an element |
|
145 |
*/ |
|
146 |
public void endElement(String uri, String localName, String qName) |
|
147 |
throws SAXException |
|
148 |
{ |
|
149 |
if(localName.equals("document")) |
|
150 |
{ //if we're closing a document, save the result |
|
151 |
addResult(new Result(currentResult)); |
|
152 |
currentResult = new Result(); |
|
153 |
currentElementName = ""; |
|
154 |
} |
|
155 |
} |
|
156 |
|
|
157 |
/** |
|
158 |
* process character data |
|
159 |
*/ |
|
160 |
public void characters(char[] cbuf, int start, int len) |
|
161 |
throws SAXException |
|
162 |
{ //get the character data for each node we care about |
|
163 |
String s = new String(cbuf, start, len); |
|
164 |
if(currentElementName.equals("docid")) |
|
165 |
{ |
|
166 |
currentResult.docid = s; |
|
167 |
} |
|
168 |
else if(currentElementName.equals("docname")) |
|
169 |
{ |
|
170 |
currentResult.docname = s; |
|
171 |
} |
|
172 |
else if(currentElementName.equals("doctype")) |
|
173 |
{ |
|
174 |
currentResult.doctype = s; |
|
175 |
} |
|
176 |
else if(currentElementName.equals("createdate")) |
|
177 |
{ |
|
178 |
currentResult.createDate = s; |
|
179 |
} |
|
180 |
else if(currentElementName.equals("updatedate")) |
|
181 |
{ |
|
182 |
currentResult.updateDate = s; |
|
183 |
} |
|
184 |
else if(currentElementName.equals("param")) |
|
185 |
{ //add the returnfields to the hashtable |
|
186 |
for (int i = 0; i < atts.getLength(); i++) |
|
187 |
{ |
|
188 |
String attributeName = atts.getQName(i); |
|
189 |
String attributeValue = null; |
|
190 |
if(attributeName.equals("name")) |
|
191 |
{ |
|
192 |
attributeValue = atts.getValue(i); |
|
193 |
} |
|
194 |
currentResult.returnfields.put(attributeValue, s); |
|
195 |
} |
|
196 |
} |
|
197 |
} |
|
198 |
|
|
199 |
/** |
|
200 |
* SAX Handler that receives notification of fatal parsing errors |
|
201 |
*/ |
|
202 |
public void fatalError(SAXParseException exception) throws SAXException |
|
203 |
{ |
|
204 |
log.fatal("FATALERROR while parsing/caching resultset: " + |
|
205 |
exception.getMessage()); |
|
206 |
throw (new SAXException("Fatal error processing/caching resultset.", |
|
207 |
exception)); |
|
208 |
} |
|
209 |
|
|
210 |
/** |
|
211 |
* SAX Handler that receives notification of recoverable parsing errors |
|
212 |
*/ |
|
213 |
public void error(SAXParseException exception) throws SAXException |
|
214 |
{ |
|
215 |
log.error("ERROR while parsing/caching resultset: " + |
|
216 |
exception.getMessage()); |
|
217 |
throw (new SAXException("Error in processing/caching resultset.", |
|
218 |
exception)); |
|
219 |
} |
|
220 |
|
|
221 |
/** |
|
222 |
* SAX Handler that receives notification of warnings |
|
223 |
*/ |
|
224 |
public void warning(SAXParseException exception) throws SAXException |
|
225 |
{ |
|
226 |
log.warn("WARNING while parsing/caching resultset: " + |
|
227 |
exception.getMessage()); |
|
228 |
throw (new SAXException("Warning.", exception)); |
|
229 |
} |
|
230 |
|
|
231 |
/** |
|
232 |
* add a new result to the resultSet |
|
233 |
*/ |
|
234 |
public void addResult(Result r) |
|
235 |
{ |
|
236 |
results.addElement(r); |
|
237 |
} |
|
238 |
|
|
239 |
/** |
|
240 |
* returns a vector of the results |
|
241 |
*/ |
|
242 |
public Vector getResults() |
|
243 |
{ |
|
244 |
return results; |
|
245 |
} |
|
246 |
|
|
247 |
/** |
|
248 |
* serialize a selection of the results. This will print the results from |
|
249 |
* start to end-1. if end is 0, nothing will be printed. |
|
250 |
*/ |
|
251 |
public String serializeToXML(int start, int end) |
|
252 |
{ |
|
253 |
StringBuffer sb = new StringBuffer(); |
|
254 |
if(start > results.size() || end > results.size()) |
|
255 |
{ //make sure we don't go over the edge of the vector |
|
256 |
start = results.size() - 10; |
|
257 |
end = results.size(); |
|
258 |
} |
|
259 |
|
|
260 |
for(int i=start; i<end; i++) |
|
261 |
{ |
|
262 |
Result r = (Result)results.elementAt(i); |
|
263 |
sb.append(r.toString()); |
|
264 |
sb.append("\n"); |
|
265 |
} |
|
266 |
return sb.toString(); |
|
267 |
} |
|
268 |
|
|
269 |
/** |
|
270 |
* returns an xml representation of this object |
|
271 |
*/ |
|
272 |
public String toString() |
|
273 |
{ |
|
274 |
StringBuffer sb = new StringBuffer(); |
|
275 |
for(int i=0; i<results.size(); i++) |
|
276 |
{ |
|
277 |
Result r = (Result)results.elementAt(i); |
|
278 |
sb.append(r.toString()); |
|
279 |
sb.append("\n"); |
|
280 |
} |
|
281 |
return sb.toString(); |
|
282 |
} |
|
283 |
|
|
284 |
|
|
285 |
/** |
|
286 |
* a class to store one result |
|
287 |
*/ |
|
288 |
public class Result |
|
289 |
{ |
|
290 |
protected String docid; |
|
291 |
protected String docname; |
|
292 |
protected String doctype; |
|
293 |
protected String createDate; |
|
294 |
protected String updateDate; |
|
295 |
protected Hashtable returnfields; |
|
296 |
|
|
297 |
/** |
|
298 |
* copy constructor |
|
299 |
*/ |
|
300 |
public Result(Result r) |
|
301 |
{ |
|
302 |
if(r != null) |
|
303 |
{ |
|
304 |
docid = r.docid; |
|
305 |
docname = r.docname; |
|
306 |
doctype = r.doctype; |
|
307 |
createDate = r.createDate; |
|
308 |
updateDate = r.updateDate; |
|
309 |
returnfields = r.returnfields; |
|
310 |
} |
|
311 |
} |
|
312 |
|
|
313 |
/** |
|
314 |
* constructor |
|
315 |
*/ |
|
316 |
public Result(String docid, String docname, String doctype, |
|
317 |
String createDate, String updateDate, Hashtable returnfields) |
|
318 |
{ |
|
319 |
this.docid = docid; |
|
320 |
this.doctype = doctype; |
|
321 |
this.createDate = createDate; |
|
322 |
this.updateDate = updateDate; |
|
323 |
this.returnfields = returnfields; |
|
324 |
} |
|
325 |
|
|
326 |
/** |
|
327 |
* default constructor |
|
328 |
*/ |
|
329 |
public Result() |
|
330 |
{ |
|
331 |
returnfields = new Hashtable(); |
|
332 |
} |
|
333 |
|
|
334 |
/** |
|
335 |
* returns serialized version of this result |
|
336 |
*/ |
|
337 |
public String toString() |
|
338 |
{ |
|
339 |
StringBuffer sb = new StringBuffer(); |
|
340 |
sb.append("<document>\n"); |
|
341 |
sb.append(" <docid>" + docid + "</docid>\n"); |
|
342 |
sb.append(" <docname>" + docname + "</docname>\n"); |
|
343 |
sb.append(" <doctype>" + doctype + "</doctype>\n"); |
|
344 |
sb.append(" <createdate>" + createDate + "</createdate>\n"); |
|
345 |
sb.append(" <updatedate>" + updateDate + "</updatedate>\n"); |
|
346 |
|
|
347 |
Enumeration keys = returnfields.keys(); |
|
348 |
while(keys.hasMoreElements()) |
|
349 |
{ |
|
350 |
String key = (String)keys.nextElement(); |
|
351 |
String value = (String)returnfields.get(key); |
|
352 |
sb.append(" <param name=\"" + key + "\">" + value + "</param>\n"); |
|
353 |
} |
|
354 |
sb.append("</document>"); |
|
355 |
return sb.toString(); |
|
356 |
} |
|
357 |
} |
|
358 |
} |
|
359 | 0 |
src/edu/ucsb/nceas/metacat/QuerySpecification.java | ||
---|---|---|
376 | 376 |
accessQuery = " AND (docid IN (" + allow + ")" |
377 | 377 |
+ " AND docid NOT IN (" + deny + "))"; |
378 | 378 |
} |
379 |
logMetacat.warn("accessquery is: " + accessQuery);
|
|
379 |
logMetacat.info("accessquery is: " + accessQuery);
|
|
380 | 380 |
return accessQuery; |
381 | 381 |
} |
382 | 382 |
|
... | ... | |
1191 | 1191 |
self.append(" AND xml_nodes.rootnodeid = xml_documents.rootnodeid"); |
1192 | 1192 |
} |
1193 | 1193 |
|
1194 |
logMetacat.warn("Attribute query: " + self.toString());
|
|
1194 |
logMetacat.info("Attribute query: " + self.toString());
|
|
1195 | 1195 |
|
1196 | 1196 |
return self.toString(); |
1197 | 1197 |
} |
src/edu/ucsb/nceas/metacat/DBQuery.java | ||
---|---|---|
30 | 30 |
|
31 | 31 |
package edu.ucsb.nceas.metacat; |
32 | 32 |
|
33 |
import java.io.BufferedWriter; |
|
34 |
import java.io.File; |
|
35 |
import java.io.FileInputStream; |
|
36 |
import java.io.FileOutputStream; |
|
37 |
import java.io.FileReader; |
|
38 |
import java.io.FileWriter; |
|
39 |
import java.io.IOException; |
|
40 |
import java.io.InputStream; |
|
41 |
import java.io.PrintWriter; |
|
42 |
import java.io.StringReader; |
|
43 |
import java.io.StringWriter; |
|
44 |
import java.io.OutputStream; |
|
33 |
import java.io.*; |
|
34 |
import java.util.zip.*; |
|
45 | 35 |
import java.sql.PreparedStatement; |
46 | 36 |
import java.sql.ResultSet; |
47 | 37 |
import java.sql.SQLException; |
48 |
import java.util.Enumeration; |
|
49 |
import java.util.Hashtable; |
|
50 |
import java.util.StringTokenizer; |
|
51 |
import java.util.Vector; |
|
52 |
import java.util.zip.ZipEntry; |
|
53 |
import java.util.zip.ZipOutputStream; |
|
38 |
import java.util.*; |
|
54 | 39 |
|
55 | 40 |
import javax.servlet.ServletOutputStream; |
56 | 41 |
import javax.servlet.http.HttpServletResponse; |
... | ... | |
282 | 267 |
// get query and qformat |
283 | 268 |
String xmlquery = ((String[])params.get("query"))[0]; |
284 | 269 |
|
285 |
logMetacat.warn("SESSIONID: " + sessionid);
|
|
286 |
logMetacat.warn("xmlquery: " + xmlquery);
|
|
270 |
logMetacat.info("SESSIONID: " + sessionid);
|
|
271 |
logMetacat.info("xmlquery: " + xmlquery);
|
|
287 | 272 |
String qformat = ((String[])params.get("qformat"))[0]; |
288 |
logMetacat.warn("qformat: " + qformat);
|
|
273 |
logMetacat.info("qformat: " + qformat);
|
|
289 | 274 |
// Get the XML query and covert it into a SQL statment |
290 | 275 |
QuerySpecification qspec = null; |
291 | 276 |
if ( xmlquery != null) |
... | ... | |
350 | 335 |
}//else |
351 | 336 |
|
352 | 337 |
} |
353 |
|
|
354 |
/** |
|
355 |
* this method parses the xml results in the string buffer and returns |
|
356 |
* just those required by the paging params. |
|
357 |
*/ |
|
358 |
private StringBuffer getPagedResult(MetacatResultSet mrs, int pagestart, |
|
359 |
int pagesize) |
|
360 |
{ |
|
361 |
//logMetacat.warn(mrs.toString()); |
|
362 |
if(pagesize == 0) |
|
363 |
{ //if pagesize is 0 then we return the whole resultset |
|
364 |
return new StringBuffer(mrs.toString()); |
|
365 |
} |
|
366 |
|
|
367 |
return new StringBuffer(mrs.serializeToXML(pagestart, pagestart + pagesize)); |
|
368 |
} |
|
369 | 338 |
|
370 |
|
|
371 | 339 |
/** |
372 | 340 |
* Transforms a hashtable of documents to an xml or html result and sent |
373 | 341 |
* the content to outputstream. Keep going untill hastable is empty. stop it. |
... | ... | |
418 | 386 |
cachedQuerySpec = (QuerySpecification)sess.getAttribute("query"); |
419 | 387 |
} |
420 | 388 |
|
421 |
if(cachedQuerySpec != null && |
|
389 |
/*if(cachedQuerySpec != null &&
|
|
422 | 390 |
cachedQuerySpec.printSQL(false).equals(qspec.printSQL(false))) |
423 | 391 |
{ //use the cached resultset if the query was the same as the last |
424 | 392 |
MetacatResultSet mrs = (MetacatResultSet)sess.getAttribute("results"); |
... | ... | |
444 | 412 |
returnString += "\n</resultset>\n"; |
445 | 413 |
return new StringBuffer(returnString); |
446 | 414 |
} |
447 |
} |
|
415 |
}*/
|
|
448 | 416 |
|
449 | 417 |
//no cached results...go on with a normal query |
450 | 418 |
|
... | ... | |
500 | 468 |
out.println(closeRestultset); |
501 | 469 |
} |
502 | 470 |
|
503 |
try |
|
471 |
/*try
|
|
504 | 472 |
{ |
505 | 473 |
//cache the query result and the query |
506 | 474 |
logMetacat.warn("#################Caching query and resultset"); |
... | ... | |
514 | 482 |
returnString += pagedResultBuffer.toString(); |
515 | 483 |
returnString += "\n</resultset>\n"; |
516 | 484 |
return new StringBuffer(returnString); |
485 |
|
|
517 | 486 |
} |
518 | 487 |
catch(Exception e) |
519 | 488 |
{ |
520 | 489 |
logMetacat.error("Could not parse resultset: " + e.getMessage()); |
521 | 490 |
//e.printStackTrace(); |
522 |
} |
|
491 |
}*/
|
|
523 | 492 |
|
524 | 493 |
//default to returning the whole resultset |
525 | 494 |
return resultset; |
... | ... | |
539 | 508 |
String query = null; |
540 | 509 |
int count = 0; |
541 | 510 |
int index = 0; |
542 |
Hashtable docListResult = new Hashtable();
|
|
511 |
ResultDocumentSet docListResult = new ResultDocumentSet();
|
|
543 | 512 |
PreparedStatement pstmt = null; |
544 | 513 |
String docid = null; |
545 | 514 |
String docname = null; |
... | ... | |
588 | 557 |
queryBuffer.append( "'') " ); |
589 | 558 |
query = queryBuffer.toString(); |
590 | 559 |
} |
591 |
|
|
592 | 560 |
String ownerQuery = getOwnerQuery(user); |
593 | 561 |
logMetacat.info("\n\n\n query: " + query); |
594 | 562 |
logMetacat.info("\n\n\n owner query: "+ownerQuery); |
... | ... | |
606 | 574 |
} else { |
607 | 575 |
query = query + accessQuery.substring(4, accessQuery.length()); |
608 | 576 |
} |
609 |
logMetacat.warn("\n\n\n final query: " + query);
|
|
577 |
logMetacat.info("\n\n\n final query: " + query);
|
|
610 | 578 |
} |
611 | 579 |
|
612 | 580 |
startTime = System.currentTimeMillis() / 1000; |
613 | 581 |
pstmt = dbconn.prepareStatement(query); |
614 | 582 |
rs = pstmt.executeQuery(); |
615 |
//now we need to process the resultset based on pagesize and pagestart |
|
616 |
//if they are not 0 |
|
583 |
|
|
617 | 584 |
double queryExecuteTime = System.currentTimeMillis() / 1000; |
618 | 585 |
logMetacat.warn("Time to execute query: " |
619 | 586 |
+ (queryExecuteTime - startTime)); |
620 |
boolean tableHasRows = rs.next(); |
|
587 |
|
|
588 |
boolean tableHasRows; |
|
589 |
|
|
590 |
//if(pagestart == 0) |
|
591 |
//{ //if we're on page 0 then move to the first record |
|
592 |
tableHasRows = rs.next(); |
|
593 |
//} |
|
594 |
|
|
595 |
/*for(int z=0; z<pagesize * pagestart; z++) |
|
596 |
{ //move to the start page |
|
597 |
logMetacat.warn("############moving to: " + z); |
|
598 |
tableHasRows = rs.next(); |
|
599 |
if(!tableHasRows) |
|
600 |
{ |
|
601 |
break; |
|
602 |
} |
|
603 |
}*/ |
|
604 |
|
|
605 |
if(pagesize == 0) |
|
606 |
{ //this makes sure we get all results if there is no paging |
|
607 |
pagesize = 99999; |
|
608 |
pagestart = 99999; |
|
609 |
} |
|
610 |
|
|
611 |
int currentIndex = 0; |
|
621 | 612 |
while (tableHasRows) |
613 |
//for(int z=pagestart * pagesize; z<(pagesize * pagestart) + pagesize;) |
|
622 | 614 |
{ |
615 |
logMetacat.warn("############getting result: " + currentIndex); |
|
623 | 616 |
docid = rs.getString(1).trim(); |
617 |
logMetacat.warn("############processing: " + docid); |
|
624 | 618 |
docname = rs.getString(2); |
625 | 619 |
doctype = rs.getString(3); |
620 |
logMetacat.warn("############processing: " + doctype); |
|
626 | 621 |
createDate = rs.getString(4); |
627 | 622 |
updateDate = rs.getString(5); |
628 | 623 |
rev = rs.getInt(6); |
629 |
|
|
624 |
|
|
630 | 625 |
// if there are returndocs to match, backtracking can be performed |
631 | 626 |
// otherwise, just return the document that was hit |
632 | 627 |
Vector returndocVec = qspec.getReturnDocList(); |
633 |
if (returndocVec.size() != 0 && !returndocVec.contains(doctype)
|
|
634 |
&& !qspec.isPercentageSearch())
|
|
628 |
if (returndocVec.size() != 0 && !returndocVec.contains(doctype) |
|
629 |
&& !qspec.isPercentageSearch()) |
|
635 | 630 |
{ |
636 | 631 |
logMetacat.warn("Back tracing now..."); |
637 | 632 |
String sep = MetaCatUtil.getOption("accNumSeparator"); |
... | ... | |
726 | 721 |
document.append("<updatedate>" + updateDate+ "</updatedate>"); |
727 | 722 |
} |
728 | 723 |
// Store the document id and the root node id |
729 |
docListResult.put(docid, (String) document.toString()); |
|
724 |
docListResult.addResultDocument( |
|
725 |
new ResultDocument(docid, (String) document.toString())); |
|
726 |
currentIndex++; |
|
727 |
logMetacat.warn("$$$$$$$real result: " + docid); |
|
730 | 728 |
count++; |
731 | 729 |
|
732 |
|
|
733 | 730 |
// Get the next package document linked to our hit |
734 | 731 |
hasBtRows = btrs.next(); |
735 | 732 |
}//while |
... | ... | |
738 | 735 |
} |
739 | 736 |
else if (returndocVec.size() == 0 || returndocVec.contains(doctype)) |
740 | 737 |
{ |
741 |
|
|
738 |
logMetacat.warn("NOT Back tracing now..."); |
|
742 | 739 |
document = new StringBuffer(); |
743 | 740 |
|
744 | 741 |
String completeDocid = docid |
... | ... | |
762 | 759 |
document.append("<updatedate>" + updateDate + "</updatedate>"); |
763 | 760 |
} |
764 | 761 |
// Store the document id and the root node id |
765 |
docListResult.put(docid, (String) document.toString()); |
|
762 |
|
|
763 |
docListResult.addResultDocument( |
|
764 |
new ResultDocument(docid, (String) document.toString())); |
|
765 |
logMetacat.warn("$$$$$$$real result: " + docid); |
|
766 |
currentIndex++; |
|
766 | 767 |
count++; |
767 |
|
|
768 | 768 |
}//else |
769 |
|
|
769 | 770 |
// when doclist reached the offset number, send out doc list and empty |
770 | 771 |
// the hash table |
771 |
if (count == offset) |
|
772 |
{ |
|
772 |
if (count == offset && pagesize == 0)
|
|
773 |
{ //if pagesize is not 0, do this later.
|
|
773 | 774 |
//reset count |
775 |
logMetacat.warn("############doing subset cache"); |
|
774 | 776 |
count = 0; |
775 |
handleSubsetResult(qspec,resultsetBuffer, out, docListResult, |
|
777 |
handleSubsetResult(qspec, resultsetBuffer, out, docListResult,
|
|
776 | 778 |
user, groups,dbconn, useXMLIndex); |
777 |
// reset docListResult |
|
778 |
docListResult = new Hashtable(); |
|
779 |
|
|
779 |
//reset docListResult |
|
780 |
docListResult = new ResultDocumentSet(); |
|
780 | 781 |
} |
782 |
|
|
783 |
logMetacat.warn("currentIndex: " + currentIndex); |
|
784 |
if(currentIndex >= ((pagesize * pagestart) + pagesize)) |
|
785 |
{ |
|
786 |
logMetacat.warn("docListResult: " + docListResult.toString()); |
|
787 |
ResultDocumentSet pagedResultsHash = new ResultDocumentSet(); |
|
788 |
for(int i=pagesize*pagestart; i<docListResult.size(); i++) |
|
789 |
{ |
|
790 |
pagedResultsHash.put(docListResult.get(i)); |
|
791 |
} |
|
792 |
|
|
793 |
docListResult = pagedResultsHash; |
|
794 |
break; |
|
795 |
} |
|
781 | 796 |
// Advance to the next record in the cursor |
782 | 797 |
tableHasRows = rs.next(); |
798 |
if(!tableHasRows) |
|
799 |
{ |
|
800 |
break; |
|
801 |
} |
|
783 | 802 |
}//while |
803 |
|
|
804 |
//subset the docListResult by pages |
|
805 |
/*Hashtable pagedResultsHash = new Hashtable(); |
|
806 |
Enumeration keys = docListResult.keys(); |
|
807 |
if(pagesize != 0) |
|
808 |
{ |
|
809 |
for(int z=0; z<pagesize * pagestart; z++) |
|
810 |
{ //move to the start page |
|
811 |
logMetacat.warn("############moving to: " + z); |
|
812 |
keys.nextElement(); |
|
813 |
} |
|
814 |
|
|
815 |
for(int z=pagestart * pagesize; z<(pagesize * pagestart) + pagesize; z++) |
|
816 |
{ |
|
817 |
try |
|
818 |
{ |
|
819 |
logMetacat.warn("############hashing result: " + z); |
|
820 |
String key = (String)keys.nextElement(); |
|
821 |
pagedResultsHash.put(key, docListResult.get(key)); |
|
822 |
} |
|
823 |
catch(java.util.NoSuchElementException nsee) |
|
824 |
{ |
|
825 |
break; |
|
826 |
} |
|
827 |
} |
|
828 |
docListResult = pagedResultsHash; |
|
829 |
}*/ |
|
830 |
|
|
784 | 831 |
rs.close(); |
785 | 832 |
pstmt.close(); |
786 | 833 |
//if docListResult is not empty, it need to be sent. |
787 |
if (!docListResult.isEmpty())
|
|
834 |
if (docListResult.size() != 0)
|
|
788 | 835 |
{ |
789 | 836 |
handleSubsetResult(qspec,resultsetBuffer, out, docListResult, |
790 | 837 |
user, groups,dbconn, useXMLIndex); |
... | ... | |
803 | 850 |
*/ |
804 | 851 |
private StringBuffer handleSubsetResult(QuerySpecification qspec, |
805 | 852 |
StringBuffer resultset, |
806 |
PrintWriter out, Hashtable partOfDoclist,
|
|
853 |
PrintWriter out, ResultDocumentSet partOfDoclist,
|
|
807 | 854 |
String user, String[]groups, |
808 | 855 |
DBConnection dbconn, boolean useXMLIndex) |
809 | 856 |
throws Exception |
810 | 857 |
{ |
811 |
|
|
858 |
|
|
812 | 859 |
// check if there is a record in xml_returnfield |
813 | 860 |
// and get the returnfield_id and usage count |
814 | 861 |
int usage_count = getXmlReturnfieldsTableId(qspec, dbconn); |
... | ... | |
842 | 889 |
// remove the keys in queryresultDocList from partOfDoclist |
843 | 890 |
Enumeration _keys = queryresultDocList.keys(); |
844 | 891 |
while (_keys.hasMoreElements()){ |
845 |
partOfDoclist.remove(_keys.nextElement()); |
|
892 |
partOfDoclist.remove((String)_keys.nextElement());
|
|
846 | 893 |
} |
847 | 894 |
|
848 | 895 |
// backup the keys-elements in partOfDoclist to check later |
849 | 896 |
// if the doc entry is indexed yet |
850 | 897 |
Hashtable partOfDoclistBackup = new Hashtable(); |
851 |
_keys = partOfDoclist.keys();
|
|
852 |
while (_keys.hasMoreElements()){
|
|
853 |
Object key = _keys.nextElement();
|
|
898 |
Iterator itt = partOfDoclist.getDocids();
|
|
899 |
while (itt.hasNext()){
|
|
900 |
Object key = itt.next();
|
|
854 | 901 |
partOfDoclistBackup.put(key, partOfDoclist.get(key)); |
855 | 902 |
} |
856 | 903 |
|
... | ... | |
860 | 907 |
|
861 | 908 |
//add return fields for the documents in partOfDoclist |
862 | 909 |
partOfDoclist = addReturnfield(partOfDoclist, qspec, user, groups, |
863 |
dbconn, useXMLIndex );
|
|
910 |
dbconn, useXMLIndex); |
|
864 | 911 |
//add relationship part part docid list for the documents in partOfDocList |
865 | 912 |
partOfDoclist = addRelationship(partOfDoclist, qspec, dbconn, useXMLIndex); |
866 | 913 |
|
867 | 914 |
|
868 |
Enumeration keys = partOfDoclist.keys();
|
|
915 |
Iterator keys = partOfDoclist.getDocids();
|
|
869 | 916 |
String key = null; |
870 | 917 |
String element = null; |
871 | 918 |
String query = null; |
872 | 919 |
int offset = (new Integer(MetaCatUtil |
873 | 920 |
.getOption("queryresult_string_length"))) |
874 | 921 |
.intValue(); |
875 |
while (keys.hasMoreElements())
|
|
922 |
while (keys.hasNext())
|
|
876 | 923 |
{ |
877 |
key = (String) keys.nextElement();
|
|
924 |
key = (String) keys.next(); |
|
878 | 925 |
element = (String)partOfDoclist.get(key); |
879 | 926 |
|
880 | 927 |
// check if the enterRecords is true, elements is not null, element's |
... | ... | |
909 | 956 |
}//while |
910 | 957 |
|
911 | 958 |
|
912 |
keys = queryresultDocList.keys();
|
|
913 |
while (keys.hasMoreElements()) |
|
959 |
Enumeration keysE = queryresultDocList.keys();
|
|
960 |
while (keysE.hasMoreElements())
|
|
914 | 961 |
{ |
915 |
key = (String) keys.nextElement(); |
|
962 |
key = (String) keysE.nextElement();
|
|
916 | 963 |
element = (String)queryresultDocList.get(key); |
917 | 964 |
// A string with element |
918 | 965 |
String xmlElement = " <document>" + element + "</document>"; |
... | ... | |
932 | 979 |
* queryresultstring as a hashtable |
933 | 980 |
*/ |
934 | 981 |
private Hashtable docidsInQueryresultTable(int returnfield_id, |
935 |
Hashtable partOfDoclist,
|
|
982 |
ResultDocumentSet partOfDoclist,
|
|
936 | 983 |
DBConnection dbconn){ |
937 | 984 |
|
938 | 985 |
Hashtable returnValue = new Hashtable(); |
... | ... | |
940 | 987 |
ResultSet rs = null; |
941 | 988 |
|
942 | 989 |
// get partOfDoclist as string for the query |
943 |
Enumeration keylist = partOfDoclist.keys();
|
|
990 |
Iterator keylist = partOfDoclist.getDocids();
|
|
944 | 991 |
StringBuffer doclist = new StringBuffer(); |
945 |
while (keylist.hasMoreElements())
|
|
992 |
while (keylist.hasNext())
|
|
946 | 993 |
{ |
947 | 994 |
doclist.append("'"); |
948 |
doclist.append((String) keylist.nextElement());
|
|
995 |
doclist.append((String) keylist.next()); |
|
949 | 996 |
doclist.append("',"); |
950 | 997 |
}//while |
951 | 998 |
|
... | ... | |
1088 | 1135 |
/* |
1089 | 1136 |
* A method to add return field to return doclist hash table |
1090 | 1137 |
*/ |
1091 |
private Hashtable addReturnfield(Hashtable docListResult,
|
|
1138 |
private ResultDocumentSet addReturnfield(ResultDocumentSet docListResult,
|
|
1092 | 1139 |
QuerySpecification qspec, |
1093 | 1140 |
String user, String[]groups, |
1094 | 1141 |
DBConnection dbconn, boolean useXMLIndex ) |
... | ... | |
1107 | 1154 |
qspec.setGroup(groups); |
1108 | 1155 |
Vector extendedFields = new Vector(qspec.getReturnFieldList()); |
1109 | 1156 |
Vector results = new Vector(); |
1110 |
Enumeration keylist = docListResult.keys();
|
|
1157 |
Iterator keylist = docListResult.getDocids();
|
|
1111 | 1158 |
StringBuffer doclist = new StringBuffer(); |
1112 | 1159 |
Vector parentidList = new Vector(); |
1113 | 1160 |
Hashtable returnFieldValue = new Hashtable(); |
1114 |
while (keylist.hasMoreElements())
|
|
1161 |
while (keylist.hasNext())
|
|
1115 | 1162 |
{ |
1116 | 1163 |
doclist.append("'"); |
1117 |
doclist.append((String) keylist.nextElement());
|
|
1164 |
doclist.append((String) keylist.next()); |
|
1118 | 1165 |
doclist.append("',"); |
1119 | 1166 |
} |
1120 | 1167 |
if (doclist.length() > 0) |
... | ... | |
1145 | 1192 |
|
1146 | 1193 |
String extendedQuery = |
1147 | 1194 |
qspec.printExtendedSQL(doclist.toString(), controlPairs, useXMLIndex); |
1148 |
logMetacat.warn("Extended query: " + extendedQuery);
|
|
1195 |
logMetacat.info("Extended query: " + extendedQuery);
|
|
1149 | 1196 |
|
1150 | 1197 |
if(extendedQuery != null){ |
1151 | 1198 |
pstmt = dbconn.prepareStatement(extendedQuery); |
... | ... | |
1215 | 1262 |
ReturnFieldValue object = |
1216 | 1263 |
(ReturnFieldValue) xmlFieldValue.nextElement(); |
1217 | 1264 |
docid = object.getDocid(); |
1218 |
if (docListResult.containsKey(docid)) {
|
|
1265 |
if (docListResult.containsDocid(docid)) {
|
|
1219 | 1266 |
String removedelement = (String) docListResult. |
1220 | 1267 |
remove(docid); |
1221 | 1268 |
docListResult. |
1222 |
put(docid,
|
|
1223 |
removedelement + object.getXMLFieldValue()); |
|
1269 |
addResultDocument(new ResultDocument(docid,
|
|
1270 |
removedelement + object.getXMLFieldValue()));
|
|
1224 | 1271 |
} |
1225 | 1272 |
else { |
1226 |
docListResult.put(docid, object.getXMLFieldValue()); |
|
1273 |
docListResult.addResultDocument( |
|
1274 |
new ResultDocument(docid, object.getXMLFieldValue())); |
|
1227 | 1275 |
} |
1228 | 1276 |
} //while |
1229 | 1277 |
double docListResultEnd = System.currentTimeMillis() / 1000; |
... | ... | |
1246 | 1294 |
/* |
1247 | 1295 |
* A method to add relationship to return doclist hash table |
1248 | 1296 |
*/ |
1249 |
private Hashtable addRelationship(Hashtable docListResult,
|
|
1297 |
private ResultDocumentSet addRelationship(ResultDocumentSet docListResult,
|
|
1250 | 1298 |
QuerySpecification qspec, |
1251 | 1299 |
DBConnection dbconn, boolean useXMLIndex ) |
1252 | 1300 |
throws Exception |
... | ... | |
1255 | 1303 |
ResultSet rs = null; |
1256 | 1304 |
StringBuffer document = null; |
1257 | 1305 |
double startRelation = System.currentTimeMillis() / 1000; |
1258 |
Enumeration docidkeys = docListResult.keys();
|
|
1259 |
while (docidkeys.hasMoreElements())
|
|
1306 |
Iterator docidkeys = docListResult.getDocids();
|
|
1307 |
while (docidkeys.hasNext())
|
|
1260 | 1308 |
{ |
1261 | 1309 |
//String connstring = |
1262 | 1310 |
// "metacat://"+util.getOption("server")+"?docid="; |
1263 | 1311 |
String connstring = "%docid="; |
1264 |
String docidkey = (String) docidkeys.nextElement();
|
|
1312 |
String docidkey = (String) docidkeys.next(); |
|
1265 | 1313 |
pstmt = dbconn.prepareStatement(QuerySpecification |
1266 | 1314 |
.printRelationSQL(docidkey)); |
1267 | 1315 |
pstmt.execute(); |
... | ... | |
1408 | 1456 |
* A method to return search result after running a query which return |
1409 | 1457 |
* field have attribue |
1410 | 1458 |
*/ |
1411 |
private Hashtable getAttributeValueForReturn(QuerySpecification squery,
|
|
1412 |
Hashtable docInformationList, String docList, boolean useXMLIndex)
|
|
1459 |
private ResultDocumentSet getAttributeValueForReturn(QuerySpecification squery,
|
|
1460 |
ResultDocumentSet docInformationList, String docList, boolean useXMLIndex)
|
|
1413 | 1461 |
{ |
1414 | 1462 |
StringBuffer XML = null; |
1415 | 1463 |
String sql = null; |
... | ... | |
1450 | 1498 |
XML.append("</param>"); |
1451 | 1499 |
tableHasRows = rs.next(); |
1452 | 1500 |
|
1453 |
if (docInformationList.containsKey(docid)) {
|
|
1501 |
if (docInformationList.containsDocid(docid)) {
|
|
1454 | 1502 |
String removedelement = (String) docInformationList |
1455 | 1503 |
.remove(docid); |
1456 | 1504 |
docInformationList.put(docid, removedelement |
... | ... | |
2476 | 2524 |
} |
2477 | 2525 |
|
2478 | 2526 |
} |
2527 |
|
|
2528 |
/** |
|
2529 |
* a class to store one result document consisting of a docid and a document |
|
2530 |
*/ |
|
2531 |
private class ResultDocument |
|
2532 |
{ |
|
2533 |
public String docid; |
|
2534 |
public String document; |
|
2535 |
|
|
2536 |
public ResultDocument(String docid, String document) |
|
2537 |
{ |
|
2538 |
this.docid = docid; |
|
2539 |
this.document = document; |
|
2540 |
} |
|
2541 |
} |
|
2542 |
|
|
2543 |
/** |
|
2544 |
* a private class to handle a set of resultDocuments |
|
2545 |
*/ |
|
2546 |
private class ResultDocumentSet |
|
2547 |
{ |
|
2548 |
private Vector docids; |
|
2549 |
private Vector documents; |
|
2550 |
|
|
2551 |
public ResultDocumentSet() |
|
2552 |
{ |
|
2553 |
docids = new Vector(); |
|
2554 |
documents = new Vector(); |
|
2555 |
} |
|
2556 |
|
|
2557 |
/** |
|
2558 |
* adds a result document to the set |
|
2559 |
*/ |
|
2560 |
public void addResultDocument(ResultDocument rd) |
|
2561 |
{ |
|
2562 |
if(rd.docid == null) |
|
2563 |
rd.docid = ""; |
|
2564 |
if(rd.document == null) |
|
2565 |
rd.document = ""; |
|
2566 |
|
|
2567 |
docids.addElement(rd.docid); |
|
2568 |
documents.addElement(rd.document); |
|
2569 |
} |
|
2570 |
|
|
2571 |
/** |
|
2572 |
* gets an iterator of docids |
|
2573 |
*/ |
|
2574 |
public Iterator getDocids() |
|
2575 |
{ |
|
2576 |
return docids.iterator(); |
|
2577 |
} |
|
2578 |
|
|
2579 |
/** |
|
2580 |
* gets an iterator of documents |
|
2581 |
*/ |
|
2582 |
public Iterator getDocuments() |
|
2583 |
{ |
|
2584 |
return documents.iterator(); |
|
2585 |
} |
|
2586 |
|
|
2587 |
/** |
|
2588 |
* returns the size of the set |
|
2589 |
*/ |
|
2590 |
public int size() |
|
2591 |
{ |
|
2592 |
return docids.size(); |
|
2593 |
} |
|
2594 |
|
|
2595 |
/** |
|
2596 |
* tests to see if this set contains the given docid |
|
2597 |
*/ |
|
2598 |
public boolean containsDocid(String docid) |
|
2599 |
{ |
|
2600 |
for(int i=0; i<docids.size(); i++) |
|
2601 |
{ |
|
2602 |
String docid0 = (String)docids.elementAt(i); |
|
2603 |
if(docid0.trim().equals(docid.trim())) |
|
2604 |
{ |
|
2605 |
return true; |
|
2606 |
} |
|
2607 |
} |
|
2608 |
return false; |
|
2609 |
} |
|
2610 |
|
|
2611 |
/** |
|
2612 |
* removes the element with the given docid |
|
2613 |
*/ |
|
2614 |
public String remove(String docid) |
|
2615 |
{ |
|
2616 |
for(int i=0; i<docids.size(); i++) |
|
2617 |
{ |
|
2618 |
String docid0 = (String)docids.elementAt(i); |
|
2619 |
if(docid0.trim().equals(docid.trim())) |
|
2620 |
{ |
|
2621 |
String returnDoc = (String)documents.elementAt(i); |
|
2622 |
documents.remove(i); |
|
2623 |
docids.remove(i); |
|
2624 |
return returnDoc; |
|
2625 |
} |
|
2626 |
} |
|
2627 |
return null; |
|
2628 |
} |
|
2629 |
|
|
2630 |
/** |
|
2631 |
* add a result document |
|
2632 |
*/ |
|
2633 |
public void put(ResultDocument rd) |
|
2634 |
{ |
|
2635 |
addResultDocument(rd); |
|
2636 |
} |
|
2637 |
|
|
2638 |
/** |
|
2639 |
* add a result document by components |
|
2640 |
*/ |
|
2641 |
public void put(String docid, String document) |
|
2642 |
{ |
|
2643 |
addResultDocument(new ResultDocument(docid, document)); |
|
2644 |
} |
|
2645 |
|
|
2646 |
/** |
|
2647 |
* get the document part of the result document by docid |
|
2648 |
*/ |
|
2649 |
public Object get(String docid) |
|
2650 |
{ |
|
2651 |
for(int i=0; i<docids.size(); i++) |
|
2652 |
{ |
|
2653 |
String docid0 = (String)docids.elementAt(i); |
|
2654 |
if(docid0.trim().equals(docid.trim())) |
|
2655 |
{ |
|
2656 |
return documents.elementAt(i); |
|
2657 |
} |
|
2658 |
} |
|
2659 |
return null; |
|
2660 |
} |
|
2661 |
|
|
2662 |
/** |
|
2663 |
* get the document part of the result document by an object |
|
2664 |
*/ |
|
2665 |
public Object get(Object o) |
|
2666 |
{ |
|
2667 |
return get((String)o); |
|
2668 |
} |
|
2669 |
|
|
2670 |
/** |
|
2671 |
* get an entire result document by index number |
|
2672 |
*/ |
|
2673 |
public ResultDocument get(int index) |
|
2674 |
{ |
|
2675 |
return new ResultDocument((String)docids.elementAt(index), |
|
2676 |
(String)documents.elementAt(index)); |
|
2677 |
} |
|
2678 |
|
|
2679 |
/** |
|
2680 |
* return a string representation of this object |
|
2681 |
*/ |
|
2682 |
public String toString() |
|
2683 |
{ |
|
2684 |
String s = ""; |
|
2685 |
for(int i=0; i<docids.size(); i++) |
|
2686 |
{ |
|
2687 |
s += (String)docids.elementAt(i) + "\n"; |
|
2688 |
} |
|
2689 |
return s; |
|
2690 |
} |
|
2691 |
} |
|
2479 | 2692 |
} |
Also available in: Unified diff
Got a better version of paging working. It still needs to be cleaned up and debugged further.