Project

General

Profile

1
/**
2
 *  '$RCSfile$'
3
 *    Purpose: A Class for upgrading the database to version 1.5
4
 *  Copyright: 2000 Regents of the University of California and the
5
 *             National Center for Ecological Analysis and Synthesis
6
 *    Authors: Saurabh Garg
7
 *
8
 *   '$Author: leinfelder $'
9
 *     '$Date: 2012-01-05 12:06:59 -0800 (Thu, 05 Jan 2012) $'
10
 * '$Revision: 6852 $'
11
 *
12
 * This program is free software; you can redistribute it and/or modify
13
 * it under the terms of the GNU General Public License as published by
14
 * the Free Software Foundation; either version 2 of the License, or
15
 * (at your option) any later version.
16
 *
17
 * This program is distributed in the hope that it will be useful,
18
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
19
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
20
 * GNU General Public License for more details.
21
 *
22
 * You should have received a copy of the GNU General Public License
23
 * along with this program; if not, write to the Free Software
24
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
25
 */
26
package edu.ucsb.nceas.metacat.dataone;
27

    
28
import java.io.IOException;
29
import java.io.InputStream;
30
import java.math.BigInteger;
31
import java.net.URL;
32
import java.security.NoSuchAlgorithmException;
33
import java.sql.SQLException;
34
import java.util.ArrayList;
35
import java.util.Collections;
36
import java.util.Date;
37
import java.util.HashMap;
38
import java.util.Hashtable;
39
import java.util.List;
40
import java.util.Map;
41
import java.util.Vector;
42

    
43
import javax.xml.parsers.ParserConfigurationException;
44
import javax.xml.xpath.XPathExpressionException;
45

    
46
import org.apache.commons.beanutils.BeanUtils;
47
import org.apache.commons.io.IOUtils;
48
import org.apache.log4j.Logger;
49
import org.apache.wicket.protocol.http.MockHttpServletRequest;
50
import org.dataone.client.ObjectFormatCache;
51
import org.dataone.ore.ResourceMapFactory;
52
import org.dataone.service.exceptions.BaseException;
53
import org.dataone.service.exceptions.NotFound;
54
import org.dataone.service.types.v1.AccessPolicy;
55
import org.dataone.service.types.v1.Checksum;
56
import org.dataone.service.types.v1.Identifier;
57
import org.dataone.service.types.v1.NodeReference;
58
import org.dataone.service.types.v1.ObjectFormatIdentifier;
59
import org.dataone.service.types.v1.Session;
60
import org.dataone.service.types.v1.Subject;
61
import org.dataone.service.types.v1.SystemMetadata;
62
import org.dataone.service.types.v1.util.ChecksumUtil;
63
import org.dataone.service.util.DateTimeMarshaller;
64
import org.dspace.foresite.ResourceMap;
65
import org.ecoinformatics.datamanager.DataManager;
66
import org.ecoinformatics.datamanager.database.DatabaseConnectionPoolInterface;
67
import org.ecoinformatics.datamanager.parser.DataPackage;
68
import org.jibx.runtime.JiBXException;
69
import org.xml.sax.SAXException;
70

    
71
import edu.ucsb.nceas.metacat.AccessionNumber;
72
import edu.ucsb.nceas.metacat.AccessionNumberException;
73
import edu.ucsb.nceas.metacat.DBUtil;
74
import edu.ucsb.nceas.metacat.IdentifierManager;
75
import edu.ucsb.nceas.metacat.McdbDocNotFoundException;
76
import edu.ucsb.nceas.metacat.McdbException;
77
import edu.ucsb.nceas.metacat.MetaCatServlet;
78
import edu.ucsb.nceas.metacat.MetacatHandler;
79
import edu.ucsb.nceas.metacat.accesscontrol.AccessControlException;
80
import edu.ucsb.nceas.metacat.client.InsufficientKarmaException;
81
import edu.ucsb.nceas.metacat.dataone.hazelcast.HazelcastService;
82
import edu.ucsb.nceas.metacat.dataquery.MetacatDatabaseConnectionPoolFactory;
83
import edu.ucsb.nceas.metacat.properties.PropertyService;
84
import edu.ucsb.nceas.metacat.replication.ReplicationService;
85
import edu.ucsb.nceas.metacat.shared.AccessException;
86
import edu.ucsb.nceas.metacat.shared.HandlerException;
87
import edu.ucsb.nceas.metacat.util.DocumentUtil;
88
import edu.ucsb.nceas.utilities.ParseLSIDException;
89
import edu.ucsb.nceas.utilities.PropertyNotFoundException;
90

    
91
public class SystemMetadataFactory {
92

    
93
	// prefix prepended to a package identifier to derive its ORE resource map identifier
	private static final String resourceMapPrefix = "resourceMap_";
	// class-wide logger
	private static Logger logMetacat = Logger.getLogger(SystemMetadataFactory.class);
95
	
96
	/**
97
	 * Creates a system metadata object for insertion into metacat
98
	 * 
99
	 * @param localId
100
	 *            The local document identifier
101
	 * @param user
102
	 *            The user submitting the system metadata document
103
	 * @param groups
104
	 *            The groups the user belongs to
105
	 * 
106
	 * @return sysMeta The system metadata object created
107
	 * @throws SAXException 
108
	 * @throws HandlerException 
109
	 * @throws AccessControlException 
110
	 * @throws AccessException 
111
	 */
112
	public static SystemMetadata createSystemMetadata(String localId, boolean includeORE, boolean downloadData)
113
			throws McdbException, McdbDocNotFoundException, SQLException,
114
			IOException, AccessionNumberException, ClassNotFoundException,
115
			InsufficientKarmaException, ParseLSIDException,
116
			PropertyNotFoundException, BaseException, NoSuchAlgorithmException,
117
			JiBXException, AccessControlException, HandlerException, SAXException, AccessException {
118
		
119
		logMetacat.debug("MetacatHandler.createSystemMetadata() called.");
120
		logMetacat.debug("provided localId: " + localId);
121

    
122
		// create system metadata for the document
123
		SystemMetadata sysMeta = new SystemMetadata();
124
		sysMeta.setSerialVersion(BigInteger.valueOf(1));
125
		AccessionNumber accNum = new AccessionNumber(localId, "NONE");
126
		int rev = Integer.valueOf(accNum.getRev());
127

    
128
		String guid = null;
129
		try {
130
			// get the guid if it exists
131
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
132
		} catch (McdbDocNotFoundException dnfe) {
133
			// otherwise create the mapping
134
			logMetacat.debug("There was a problem getting the guid from "
135
							+ "the given localId (docid and revision). The error message was: "
136
							+ dnfe.getMessage());
137
			logMetacat.debug("No guid in the identifier table.  adding it for " + localId);
138
			IdentifierManager.getInstance().createMapping(localId, localId);
139
			logMetacat.debug("Mapping created for " + localId);
140
			logMetacat.debug("accessionNumber: " + accNum);
141
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
142
		}
143
		Identifier identifier = new Identifier();
144
		identifier.setValue(guid);
145

    
146
		// set the id
147
		sysMeta.setIdentifier(identifier);
148

    
149
		// get the data or metadata object
150
		InputStream inputStream;
151
		try {
152
			inputStream = MetacatHandler.read(localId);
153
		} catch (ParseLSIDException ple) {
154
			logMetacat.debug("There was a problem parsing the LSID from "
155
					+ localId + ". The error message was: " + ple.getMessage());
156
			throw ple;
157

    
158
		} catch (PropertyNotFoundException pnfe) {
159
			logMetacat.debug("There was a problem finding a property. "
160
					+ "The error message was: " + pnfe.getMessage());
161
			throw pnfe;
162

    
163
		} catch (McdbException me) {
164
			logMetacat.debug("There was a Metacat problem. "
165
					+ "The error message was: " + me.getMessage());
166
			throw me;
167

    
168
		} catch (SQLException sqle) {
169
			logMetacat.debug("There was a SQL problem. "
170
					+ "The error message was: " + sqle.getMessage());
171
			throw sqle;
172

    
173
		} catch (ClassNotFoundException cnfe) {
174
			logMetacat.debug("There was a problem finding a class. "
175
					+ "The error message was: " + cnfe.getMessage());
176
			throw cnfe;
177

    
178
		} catch (IOException ioe) {
179
			logMetacat.debug("There was an I/O exception. "
180
					+ "The error message was: " + ioe.getMessage());
181
			throw ioe;
182

    
183
		} // end try()
184

    
185
		// get additional docinfo
186
		Hashtable<String, String> docInfo = ReplicationService.getDocumentInfoMap(localId);
187
		// set the default object format
188
		String doctype = docInfo.get("doctype");
189
		ObjectFormatIdentifier fmtid = null;
190

    
191
		// set the object format, fall back to defaults
192
		try {
193
			fmtid = ObjectFormatCache.getInstance().getFormat(doctype).getFormatId();
194
		} catch (NotFound nfe) {
195

    
196
			try {
197
				// format is not registered, use default
198
				if (doctype.trim().equals("BIN")) {
199
					fmtid = ObjectFormatCache.getInstance().getFormat(
200
							"application/octet-stream").getFormatId();
201

    
202
				} else {
203
					fmtid = ObjectFormatCache.getInstance().getFormat(
204
							"text/plain").getFormatId();
205
				}
206

    
207
			} catch (NotFound nf) {
208
				logMetacat.error("There was a problem getting the default format "
209
								+ "from the ObjectFormatCache: "
210
								+ nf.getMessage());
211
				throw nf;
212
			}
213

    
214
		}
215

    
216
		sysMeta.setFormatId(fmtid);
217
		logMetacat.debug("The ObjectFormat for " + localId + " is " + fmtid.getValue());
218

    
219
		// create the checksum
220
		inputStream = MetacatHandler.read(localId);
221
		String algorithm = "MD5";
222
		Checksum checksum = ChecksumUtil.checksum(inputStream, algorithm);
223
		sysMeta.setChecksum(checksum);
224
		
225
		// set the size
226
		inputStream = MetacatHandler.read(localId);
227
		String sizeStr = new Long(sizeOfStream(inputStream)).toString();
228
		sysMeta.setSize(new BigInteger(sizeStr));
229
		
230
		// submitter
231
		Subject submitter = new Subject();
232
		submitter.setValue(docInfo.get("user_updated"));
233
		sysMeta.setSubmitter(submitter);
234
		
235
		// rights holder
236
		Subject owner = new Subject();
237
		owner.setValue(docInfo.get("user_owner"));
238
		sysMeta.setRightsHolder(owner);
239

    
240
		// dates
241
		String createdDateString = docInfo.get("date_created");
242
		String updatedDateString = docInfo.get("date_updated");
243
		Date createdDate = DateTimeMarshaller.deserializeDateToUTC(createdDateString);
244
		Date updatedDate = DateTimeMarshaller.deserializeDateToUTC(updatedDateString);  
245
		sysMeta.setDateUploaded(createdDate);
246
		sysMeta.setDateSysMetadataModified(updatedDate);
247
		
248
		// set the revision history
249
		String docidWithoutRev = accNum.getDocid();
250
		Identifier obsoletedBy = null;
251
		Identifier obsoletes = null;
252
		Vector<Integer> revisions = DBUtil.getRevListFromRevisionTable(docidWithoutRev);
253
		// ensure this ordering since processing depends on it
254
		Collections.sort(revisions);
255
		for (int existingRev: revisions) {
256
			// use the docid+rev as the guid
257
			String existingPid = docidWithoutRev + "." + existingRev;
258
			if (existingRev < rev) {
259
				// it's the old docid, until it's not
260
				obsoletes = new Identifier();
261
				obsoletes.setValue(existingPid);
262
			}
263
			if (existingRev > rev) {
264
				// it's the newer docid
265
				obsoletedBy = new Identifier();
266
				obsoletedBy.setValue(existingPid);
267
				// only want the version just after it
268
				break;
269
			}
270
		}
271
		// set them on our object
272
		sysMeta.setObsoletedBy(obsoletedBy);
273
		sysMeta.setObsoletes(obsoletes);
274
		
275
		// update the system metadata for the object[s] we are revising
276
		if (obsoletedBy != null) {
277
			//SystemMetadata obsoletedBySysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(obsoletedBy);
278
			SystemMetadata obsoletedBySysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletedBy.getValue());
279
			if (obsoletedBySysMeta != null) {
280
				obsoletedBySysMeta.setObsoletes(identifier);
281
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletedBy, obsoletedBySysMeta);
282
			}
283
		}
284
		if (obsoletes != null) {
285
			//SystemMetadata obsoletesSysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(obsoletes);
286
			SystemMetadata obsoletesSysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletes.getValue());
287
			if (obsoletesSysMeta != null) {
288
				obsoletesSysMeta.setObsoletedBy(identifier);
289
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletes, obsoletesSysMeta);
290
			}
291
		}
292
		
293
		// look up the access control policy we have in metacat
294
		AccessPolicy accessPolicy = IdentifierManager.getInstance().getAccessPolicy(guid);
295
		sysMeta.setAccessPolicy(accessPolicy);
296
		
297
		// authoritative node
298
		NodeReference nr = new NodeReference();
299
		nr.setValue(PropertyService.getProperty("dataone.memberNodeId"));
300
		sysMeta.setOriginMemberNode(nr);
301
		sysMeta.setAuthoritativeMemberNode(nr);
302
		
303
		// further parse EML documents to get data object format,
304
		// describes and describedBy information
305
		if (fmtid == ObjectFormatCache.getInstance().getFormat(
306
				"eml://ecoinformatics.org/eml-2.0.0").getFormatId()
307
				|| fmtid == ObjectFormatCache.getInstance().getFormat(
308
						"eml://ecoinformatics.org/eml-2.0.1").getFormatId()
309
				|| fmtid == ObjectFormatCache.getInstance().getFormat(
310
						"eml://ecoinformatics.org/eml-2.1.0").getFormatId()
311
				|| fmtid == ObjectFormatCache.getInstance().getFormat(
312
						"eml://ecoinformatics.org/eml-2.1.1").getFormatId()) {
313

    
314
			try {
315
				inputStream = MetacatHandler.read(localId);
316
				DatabaseConnectionPoolInterface connectionPool = 
317
					MetacatDatabaseConnectionPoolFactory.getDatabaseConnectionPoolInterface();
318
				DataManager dataManager = 
319
					DataManager.getInstance(connectionPool, connectionPool.getDBAdapterName());
320
				DataPackage dataPackage = dataManager.parseMetadata(inputStream);
321

    
322
				// iterate through the data objects in the EML doc and add sysmeta
323
				logMetacat.debug("In createSystemMetadata() the number of data "
324
								+ "entities is: "
325
								+ dataPackage.getEntityNumber());
326

    
327
				// for generating the ORE map
328
	            Map<Identifier, List<Identifier>> idMap = new HashMap<Identifier, List<Identifier>>();
329
	            List<Identifier> dataIds = new ArrayList<Identifier>();
330
				
331
				// iterate through data objects described by the EML
332
	            if (dataPackage.getEntityList() != null) {
333
					for (int j = 0; j < dataPackage.getEntityList().length; j++) {
334
	
335
						String dataDocUrl = dataPackage.getEntityList()[j].getURL();
336
						String dataDocMimeType = dataPackage.getEntityList()[j].getDataFormat();
337
						// default to binary
338
						if (dataDocMimeType == null) {
339
							dataDocMimeType = ObjectFormatCache.getInstance()
340
									.getFormat("application/octet-stream")
341
									.getFormatId().getValue();
342
						}
343

    
344
						// process the data
345
						String dataDocLocalId = null;
346
						Identifier dataGuid = new Identifier();
347

    
348
						// handle ecogrid, or downloadable data
349
						String ecogridPrefix = "ecogrid://knb/";
350
						if (dataDocUrl.trim().startsWith(ecogridPrefix)) {
351
							dataDocLocalId = dataDocUrl.substring(dataDocUrl.indexOf(ecogridPrefix) + ecogridPrefix.length());
352
						} else {
353
							// should we try downloading the remote data?
354
							if (downloadData) {
355
								InputStream dataObject = null;
356
								try {
357
									// download the data from the URL
358
									URL dataURL = new URL(dataDocUrl);
359
									dataObject = dataURL.openStream();
360
									// TODO: check for valid content
361
								} catch (Exception e) {
362
									// error with the download
363
									logMetacat.warn("Error downloading remote data. " + e.getMessage());
364
								}
365
								
366
								if (dataObject != null) {
367
									// create the local version of it
368
									dataDocLocalId = DocumentUtil.generateDocumentId(1);
369
									IdentifierManager.getInstance().createMapping(dataDocLocalId, dataDocLocalId);
370
									dataGuid.setValue(dataDocLocalId);
371
									
372
									// save it locally
373
									Session session = new Session();
374
									session.setSubject(submitter);
375
									MockHttpServletRequest request = new MockHttpServletRequest(null, null, null);
376
									MNodeService.getInstance(request).insertDataObject(dataObject, dataGuid, session);
377
								}
378
							}
379
							
380
						}
381
						
382
						logMetacat.debug("Data local ID: " + dataDocLocalId);
383
						logMetacat.debug("Data URL     : " + dataDocUrl);
384
						logMetacat.debug("Data mime    : " + dataDocMimeType);
385
						
386
						// now we have a local id for the data
387
						if (dataDocLocalId != null) {
388
							
389
							// look up the guid for the data
390
							String dataDocid = DocumentUtil.getSmartDocId(dataDocLocalId);
391
							int dataRev = DocumentUtil.getRevisionFromAccessionNumber(dataDocLocalId);
392
	
393
							// check if data system metadata exists already
394
							SystemMetadata dataSysMeta = null;
395
							String dataGuidString = null;
396
							try {
397
								// look for the identifier
398
								dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);
399
								// set it
400
								dataGuid.setValue(dataGuidString);
401
								// look up the system metadata
402
								try {
403
									dataSysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(dataGuid);
404
								} catch (Exception e) {
405
									// probably not in the system
406
									dataSysMeta = null;
407
								}
408
								//dataSysMeta = IdentifierManager.getInstance().getSystemMetadata(dataGuidString);
409
							} catch (McdbDocNotFoundException nf) {
410
								// we didn't find it
411
								dataSysMeta = null;
412
							}
413
								
414
							// we'll have to generate it	
415
							if (dataSysMeta == null) {
416
								// System metadata for data doesn't exist yet, so create it
417
								logMetacat.debug("There was not an existing system metadata document for " + dataDocLocalId);
418
								try {
419
									logMetacat.debug("Creating a system metadata " + "document for " + dataDocLocalId);
420
									dataSysMeta = createSystemMetadata(dataDocLocalId, includeORE, false);
421
	
422
									// now look it up again
423
									dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);
424
	
425
									// set the guid
426
									dataGuid.setValue(dataGuidString);
427
	
428
									// set object format
429
									logMetacat.debug("Updating system metadata for "
430
													+ dataGuid.getValue() + " to "
431
													+ dataDocMimeType);
432
									try {
433
										ObjectFormatIdentifier fmt = 
434
											ObjectFormatCache.getInstance().getFormat(dataDocMimeType).getFormatId();
435
										dataSysMeta.setFormatId(fmt);
436
									} catch (NotFound nfe) {
437
										logMetacat.debug("Couldn't find format identifier for: "
438
														+ dataDocMimeType
439
														+ ". Setting it to application/octet-stream.");
440
										ObjectFormatIdentifier newFmtid = new ObjectFormatIdentifier();
441
										newFmtid.setValue("application/octet-stream");
442
									}
443
	
444
									// update the values
445
									HazelcastService.getInstance().getSystemMetadataMap().put(dataSysMeta.getIdentifier(), dataSysMeta);
446
									
447
	
448
								} catch (McdbDocNotFoundException mdnf) {
449
									mdnf.printStackTrace();
450
									throw mdnf;
451
								} catch (NumberFormatException nfe) {
452
									nfe.printStackTrace();
453
									throw nfe;
454
								} catch (AccessionNumberException ane) {
455
									ane.printStackTrace();
456
									throw ane;
457
								} catch (SQLException sqle) {
458
									sqle.printStackTrace();
459
									throw sqle;
460
								} catch (NoSuchAlgorithmException nsae) {
461
									nsae.printStackTrace();
462
									throw nsae;
463
								} catch (IOException ioe) {
464
									ioe.printStackTrace();
465
									throw ioe;
466
								} catch (PropertyNotFoundException pnfe) {
467
									pnfe.printStackTrace();
468
									throw pnfe;
469
								} catch (BaseException be) {
470
									be.printStackTrace();
471
									throw be;
472
								}	
473
								
474
							}
475
							
476
							// part of the ORE package
477
							dataIds.add(dataGuid);
478
	
479
						} // end if (EML package)
480
	
481
					} // end for (data entities)
482
					
483
	            } // data entities not null
484
	            
485
				// ORE map
486
				if (includeORE) {
487
					// can we generate them?
488
			        if (!dataIds.isEmpty()) {
489
			        	// it doesn't exist in the system?
490
			        	if (!oreExistsFor(sysMeta.getIdentifier())) {
491
			        	
492
				            // generate the ORE map for this datapackage
493
				            Identifier resourceMapId = new Identifier();
494
				            // want to be able to run this over and over again for now
495
				            resourceMapId.setValue(resourceMapPrefix + sysMeta.getIdentifier().getValue());
496
				            idMap.put(sysMeta.getIdentifier(), dataIds);
497
				            ResourceMap rm = ResourceMapFactory.getInstance().createResourceMap(resourceMapId, idMap);
498
				            String resourceMapXML = ResourceMapFactory.getInstance().serializeResourceMap(rm);
499
				            // copy most of the same system metadata as the packaging metadata
500
				            SystemMetadata resourceMapSysMeta = new SystemMetadata();
501
				            BeanUtils.copyProperties(resourceMapSysMeta, sysMeta);
502
				            resourceMapSysMeta.setIdentifier(resourceMapId);
503
				            Checksum oreChecksum = ChecksumUtil.checksum(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), "MD5");
504
							resourceMapSysMeta.setChecksum(oreChecksum);
505
				            ObjectFormatIdentifier formatId = ObjectFormatCache.getInstance().getFormat("http://www.openarchives.org/ore/terms").getFormatId();
506
							resourceMapSysMeta.setFormatId(formatId);
507
							resourceMapSysMeta.setSize(BigInteger.valueOf(sizeOfStream(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING))));
508
							
509
							// set the revision graph
510
							resourceMapSysMeta.setObsoletes(null);
511
							resourceMapSysMeta.setObsoletedBy(null);
512
							// look up the resource map that this one obsoletes
513
							if (sysMeta.getObsoletes() != null) {
514
								Identifier resourceMapObsoletes = new Identifier();
515
								resourceMapObsoletes.setValue(resourceMapPrefix + sysMeta.getObsoletes().getValue());
516
								resourceMapSysMeta.setObsoletes(resourceMapObsoletes);
517
								SystemMetadata resourceMapObsoletesSystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletes);
518
								if (resourceMapObsoletesSystemMetadata != null) {
519
									resourceMapObsoletesSystemMetadata.setObsoletedBy(resourceMapId);
520
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletes, resourceMapObsoletesSystemMetadata);
521
								}
522
							}
523
							// look up the resource map that this one is obsoletedBy
524
							if (sysMeta.getObsoletedBy() != null) {
525
								Identifier resourceMapObsoletedBy = new Identifier();
526
								resourceMapObsoletedBy.setValue(resourceMapPrefix + sysMeta.getObsoletedBy().getValue());
527
								resourceMapSysMeta.setObsoletedBy(resourceMapObsoletedBy);
528
								SystemMetadata resourceMapObsoletedBySystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletedBy);
529
								if (resourceMapObsoletedBySystemMetadata != null) {
530
									resourceMapObsoletedBySystemMetadata.setObsoletes(resourceMapId);
531
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletedBy, resourceMapObsoletedBySystemMetadata);
532
								}
533
							}
534
				            
535
							// save it locally
536
							Session session = new Session();
537
							session.setSubject(submitter);
538
							MockHttpServletRequest request = new MockHttpServletRequest(null, null, null);
539
							MNodeService.getInstance(request).insertDataObject(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), resourceMapId, session);
540
							MNodeService.getInstance(request).insertSystemMetadata(resourceMapSysMeta);
541
			        	}
542
			        }
543
				}
544

    
545
			} catch (ParserConfigurationException pce) {
546
				logMetacat.debug("There was a problem parsing the EML document. "
547
								+ "The error message was: " + pce.getMessage());
548

    
549
			} catch (SAXException saxe) {
550
				logMetacat.debug("There was a problem traversing the EML document. "
551
								+ "The error message was: " + saxe.getMessage());
552

    
553
			} catch (XPathExpressionException xpee) {
554
				logMetacat.debug("There was a problem searching the EML document. "
555
								+ "The error message was: " + xpee.getMessage());
556
			} catch (Exception e) {
557
				logMetacat.debug("There was a problem creating System Metadata. "
558
								+ "The error message was: " + e.getMessage());
559
				e.printStackTrace();
560
			} // end try()
561

    
562
		} // end if()
563

    
564
		return sysMeta;
565
	}
566
	
567
	/**
	 * Determines if we already have registered an ORE map for this package.
	 * Currently always returns false (stub): the CN.search() lookup is not
	 * yet implemented, so callers will regenerate the ORE map every time.
	 *
	 * @param guid of the EML/packaging object
	 * @return true if there is an ORE map for the given package
	 */
	private static boolean oreExistsFor(Identifier guid) {
		// TODO: implement call to CN.search()
		return false;
	}
576

    
577
	/**
578
	 * Find the size (in bytes) of a stream. Note: This needs to refactored out
579
	 * of MetacatHandler and into a utility when stream i/o in Metacat is
580
	 * evaluated.
581
	 * 
582
	 * @param is The InputStream of bytes
583
	 * 
584
	 * @return size The size in bytes of the input stream as a long
585
	 * 
586
	 * @throws IOException
587
	 */
588
	private static long sizeOfStream(InputStream is) throws IOException {
589

    
590
		long size = 0;
591
		byte[] b = new byte[1024];
592
		int numread = is.read(b, 0, 1024);
593
		while (numread != -1) {
594
			size += numread;
595
			numread = is.read(b, 0, 1024);
596
		}
597
		return size;
598

    
599
	}
600
}
(5-5/5)