41 |
41 |
import java.sql.PreparedStatement;
|
42 |
42 |
import java.sql.ResultSet;
|
43 |
43 |
import java.sql.SQLException;
|
|
44 |
import java.sql.Statement;
|
44 |
45 |
import java.util.Enumeration;
|
45 |
46 |
import java.util.Hashtable;
|
46 |
47 |
import java.util.HashMap;
|
... | ... | |
56 |
57 |
import org.xml.sax.EntityResolver;
|
57 |
58 |
import org.xml.sax.ErrorHandler;
|
58 |
59 |
import org.xml.sax.InputSource;
|
|
60 |
import org.xml.sax.SAXException;
|
59 |
61 |
import org.xml.sax.XMLReader;
|
60 |
62 |
import org.xml.sax.helpers.XMLReaderFactory;
|
61 |
63 |
|
... | ... | |
1157 |
1159 |
* into the xml_index table. This requires that the DocumentImpl instance
|
1158 |
1160 |
* exists, so first call the constructor that reads the document from the
|
1159 |
1161 |
* database.
|
|
1162 |
*
|
|
1163 |
* @throws McdbException on error getting the node records for the document
|
1160 |
1164 |
*/
|
1161 |
1165 |
public void buildIndex() throws McdbException
|
1162 |
1166 |
{
|
... | ... | |
1175 |
1179 |
Long nodeId = new Long(currentNode.getNodeId());
|
1176 |
1180 |
nodeRecordMap.put(nodeId, currentNode);
|
1177 |
1181 |
}
|
|
1182 |
|
|
1183 |
// Opening separate db connection for deleting and writing
|
|
1184 |
// XML Index -- be sure that it is all in one db transaction
|
|
1185 |
int serialNumber = -1;
|
|
1186 |
DBConnection dbConn = null;
|
|
1187 |
try {
|
|
1188 |
dbConn = DBConnectionPool.getDBConnection(
|
|
1189 |
"DocumentImpl.buildIndex");
|
|
1190 |
serialNumber = dbConn.getCheckOutSerialNumber();
|
|
1191 |
dbConn.setAutoCommit(false);
|
|
1192 |
//make sure record is done
|
|
1193 |
//checkDocumentTable();
|
|
1194 |
|
|
1195 |
// Delete the previous index entries for this document
|
|
1196 |
deleteNodeIndex(dbConn);
|
|
1197 |
|
|
1198 |
// Step through all of the node records we were given
|
|
1199 |
// and build the new index and update the database
|
|
1200 |
it = nodeRecordLists.iterator();
|
|
1201 |
while (it.hasNext()) {
|
|
1202 |
NodeRecord currentNode = (NodeRecord) it.next();
|
|
1203 |
HashMap pathList = new HashMap();
|
|
1204 |
if (currentNode.nodetype.equals("ELEMENT") ||
|
|
1205 |
currentNode.nodetype.equals("ATTRIBUTE") ) {
|
1178 |
1206 |
|
1179 |
|
// Step through all of the node records we were given
|
1180 |
|
it = nodeRecordLists.iterator();
|
1181 |
|
while (it.hasNext()) {
|
1182 |
|
NodeRecord currentNode = (NodeRecord) it.next();
|
1183 |
|
if (currentNode.nodetype.equals("ELEMENT") ||
|
1184 |
|
currentNode.nodetype.equals("ATTRIBUTE") ) {
|
1185 |
|
|
1186 |
|
System.err.println("\nStarting Node: " +
|
1187 |
|
currentNode.getNodeId() + " (" +
|
1188 |
|
currentNode.getParentNodeId() + "): " +
|
1189 |
|
currentNode.getNodeName() + " (" +
|
1190 |
|
currentNode.getNodeType() + ")");
|
1191 |
|
if (atRootElement) {
|
1192 |
|
rootNodeId = currentNode.getNodeId();
|
1193 |
|
atRootElement = false;
|
|
1207 |
System.err.println("\nStarting Node: " +
|
|
1208 |
currentNode.getNodeId() + " (" +
|
|
1209 |
currentNode.getParentNodeId() + "): " +
|
|
1210 |
currentNode.getNodeName() + " (" +
|
|
1211 |
currentNode.getNodeType() + ")");
|
|
1212 |
if (atRootElement) {
|
|
1213 |
rootNodeId = currentNode.getNodeId();
|
|
1214 |
atRootElement = false;
|
|
1215 |
}
|
|
1216 |
traverseParents(nodeRecordMap, rootNodeId,
|
|
1217 |
currentNode.getNodeId(),
|
|
1218 |
currentNode.getNodeId(), "", pathList);
|
|
1219 |
updateNodeIndex(dbConn, pathList);
|
1194 |
1220 |
}
|
1195 |
|
traverseParents(nodeRecordMap, rootNodeId,
|
1196 |
|
currentNode.getNodeId(), "");
|
1197 |
1221 |
}
|
|
1222 |
dbConn.commit();
|
|
1223 |
} catch (SQLException e) {
|
|
1224 |
MetaCatUtil.debugMessage(
|
|
1225 |
"SQL Exception while inserting path index in " +
|
|
1226 |
"DocumentImpl.buildIndex for document " + docid, 10);
|
|
1227 |
MetaCatUtil.debugMessage(e.getMessage(), 10);
|
|
1228 |
e.printStackTrace();
|
|
1229 |
try {
|
|
1230 |
dbConn.rollback();
|
|
1231 |
} catch (SQLException sqle) {
|
|
1232 |
MetaCatUtil.debugMessage(
|
|
1233 |
"Error while rolling back commit in DocumentImpl.buildIndex"
|
|
1234 |
+ "\n" + sqle.getMessage(), 10);
|
|
1235 |
}
|
|
1236 |
} finally {
|
|
1237 |
DBConnectionPool.returnDBConnection(dbConn, serialNumber);
|
1198 |
1238 |
}
|
1199 |
1239 |
}
|
1200 |
1240 |
|
... | ... | |
1204 |
1244 |
*
|
1205 |
1245 |
* @param records the set of records hashed by nodeId
|
1206 |
1246 |
* @param rootNodeId the id of the root element of the document
|
|
1247 |
* @param leafNodeId the id of the leafNode being processed
|
1207 |
1248 |
* @param id the id of the current node to be processed
|
1208 |
1249 |
* @param children the string representation of all child nodes of this id
|
|
1250 |
* @param pathList the hash to which paths are added
|
1209 |
1251 |
*/
|
1210 |
|
private void traverseParents(HashMap records, long rootNodeId, long id,
|
1211 |
|
String children) {
|
1212 |
|
NodeRecord current = (NodeRecord)records.get(new Long(id));
|
|
1252 |
private void traverseParents(HashMap records, long rootNodeId,
|
|
1253 |
long leafNodeId, long id,
|
|
1254 |
String children, HashMap pathList) {
|
|
1255 |
Long nodeId = new Long(id);
|
|
1256 |
NodeRecord current = (NodeRecord)records.get(nodeId);
|
|
1257 |
long parentId = current.getParentNodeId();
|
1213 |
1258 |
String currentName = current.getNodeName();
|
1214 |
1259 |
if (current.nodetype.equals("ELEMENT") ||
|
1215 |
1260 |
current.nodetype.equals("ATTRIBUTE") ) {
|
1216 |
1261 |
|
1217 |
1262 |
if (children.equals("")) {
|
1218 |
1263 |
System.err.print("A: " + currentName +"\n");
|
|
1264 |
pathList.put(currentName, new PathIndexEntry(
|
|
1265 |
leafNodeId, currentName, docid, doctype, parentId));
|
1219 |
1266 |
}
|
1220 |
1267 |
currentName = "/" + currentName;
|
1221 |
|
long parentId = current.getParentNodeId();
|
1222 |
1268 |
currentName = currentName + children;
|
1223 |
1269 |
if (parentId != 0) {
|
1224 |
|
traverseParents(records, rootNodeId, parentId, currentName);
|
|
1270 |
traverseParents(records, rootNodeId, leafNodeId,
|
|
1271 |
parentId, currentName, pathList);
|
1225 |
1272 |
}
|
|
1273 |
String path = current.getNodeName() + children;
|
1226 |
1274 |
if (!children.equals("")) {
|
1227 |
|
System.err.print("B: " + current.getNodeName() + children +"\n");
|
|
1275 |
System.err.print("B: " + path +"\n");
|
|
1276 |
pathList.put(path, new PathIndexEntry(leafNodeId, path, docid,
|
|
1277 |
doctype, parentId));
|
1228 |
1278 |
}
|
1229 |
1279 |
if (id == rootNodeId) {
|
1230 |
|
System.err.print("C: " + '/' + current.getNodeName() +
|
1231 |
|
children +"\n");
|
|
1280 |
String fullPath = '/' + path;
|
|
1281 |
System.err.print("C: " + fullPath +"\n");
|
|
1282 |
pathList.put(fullPath, new PathIndexEntry(leafNodeId, fullPath,
|
|
1283 |
docid, doctype, parentId));
|
1232 |
1284 |
}
|
1233 |
1285 |
}
|
1234 |
1286 |
}
|
1235 |
1287 |
|
|
1288 |
/**
|
|
1289 |
* Delete the paths from the xml_index table on the database in preparation
|
|
1290 |
* of a subsequent update.
|
|
1291 |
*
|
|
1292 |
* @param conn the database connection to use, keeping a single transaction
|
|
1293 |
* @throws SQLException if there is an error deleting from the db
|
|
1294 |
*/
|
|
1295 |
public void deleteNodeIndex(DBConnection conn) throws SQLException
|
|
1296 |
{
|
|
1297 |
String familyId = MetaCatUtil.getDocIdFromString(docid);
|
|
1298 |
String sql = "DELETE FROM xml_index WHERE docid LIKE ?";
|
|
1299 |
MetaCatUtil.debugMessage(sql, 55);
|
|
1300 |
System.err.println("SQL is: " + sql);
|
|
1301 |
|
|
1302 |
PreparedStatement pstmt = conn.prepareStatement(sql);
|
|
1303 |
|
|
1304 |
// Increase usage count for the connection
|
|
1305 |
conn.increaseUsageCount(1);
|
|
1306 |
|
|
1307 |
// Execute the delete and close the statement
|
|
1308 |
pstmt.setString(1, familyId);
|
|
1309 |
int rows = pstmt.executeUpdate();
|
|
1310 |
pstmt.close();
|
|
1311 |
MetaCatUtil.debugMessage("Deleted " + rows + " rows from xml_index " +
|
|
1312 |
"for document " + docid, 55);
|
|
1313 |
}
|
|
1314 |
|
|
1315 |
/**
|
|
1316 |
* Insert the paths from the pathList into the xml_index table on the
|
|
1317 |
* database.
|
|
1318 |
*
|
|
1319 |
* @param conn the database connection to use, keeping a single transaction
|
|
1320 |
* @param pathList the hash of paths to insert
|
|
1321 |
* @throws SQLException if there is an error inserting into the db
|
|
1322 |
*/
|
|
1323 |
private void updateNodeIndex(DBConnection conn, HashMap pathList)
|
|
1324 |
throws SQLException
|
|
1325 |
{
|
|
1326 |
// Create an insert statement to reuse for all of the path
|
|
1327 |
// insertions
|
|
1328 |
PreparedStatement pstmt = conn.prepareStatement(
|
|
1329 |
"INSERT INTO xml_index (nodeid, path, docid, doctype, " +
|
|
1330 |
"parentnodeid) " + "VALUES (?, ?, ?, ?, ?)");
|
|
1331 |
// Increase usage count for the connection
|
|
1332 |
conn.increaseUsageCount(1);
|
|
1333 |
String familyId = MetaCatUtil.getDocIdFromString(docid);
|
|
1334 |
pstmt.setString(3, familyId);
|
|
1335 |
pstmt.setString(4, doctype);
|
|
1336 |
|
|
1337 |
// Step through the hashtable and insert each of the path values
|
|
1338 |
Iterator it = pathList.values().iterator();
|
|
1339 |
while (it.hasNext()) {
|
|
1340 |
PathIndexEntry entry = (PathIndexEntry)it.next();
|
|
1341 |
System.err.println("Inserting: " + entry.nodeId +
|
|
1342 |
" (" + entry.parentId + "): " + entry.path);
|
|
1343 |
pstmt.setLong(1, entry.nodeId);
|
|
1344 |
pstmt.setString(2, entry.path);
|
|
1345 |
pstmt.setLong(5, entry.parentId);
|
|
1346 |
pstmt.executeUpdate();
|
|
1347 |
}
|
|
1348 |
// Close the database statement
|
|
1349 |
pstmt.close();
|
|
1350 |
}
|
|
1351 |
|
1236 |
1352 |
private boolean isRevisionOnly(DocumentIdentifier docid) throws Exception
|
1237 |
1353 |
{
|
1238 |
1354 |
//System.out.println("inRevisionOnly");
|
The new buildIndex() function can now write all of the appropriate index paths to the database for any given document. Next we need to create a function that rebuilds the index on demand, and modify DBSAXHandler.run() to use the new buildIndex() function.