/**
 *  '$RCSfile$'
 *    Purpose: A Class for generating DataONE system metadata for Metacat objects
 *  Copyright: 2000 Regents of the University of California and the
 *             National Center for Ecological Analysis and Synthesis
 *    Authors: Saurabh Garg
 *
 *   '$Author: leinfelder $'
 *     '$Date: 2012-01-05 13:37:12 -0800 (Thu, 05 Jan 2012) $'
 * '$Revision: 6855 $'
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
package edu.ucsb.nceas.metacat.dataone;

import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.net.URL;
import java.security.NoSuchAlgorithmException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.apache.wicket.protocol.http.MockHttpServletRequest;
import org.dataone.client.ObjectFormatCache;
import org.dataone.ore.ResourceMapFactory;
import org.dataone.service.exceptions.BaseException;
import org.dataone.service.exceptions.NotFound;
import org.dataone.service.types.v1.AccessPolicy;
import org.dataone.service.types.v1.Checksum;
import org.dataone.service.types.v1.Identifier;
import org.dataone.service.types.v1.NodeReference;
import org.dataone.service.types.v1.ObjectFormatIdentifier;
import org.dataone.service.types.v1.Session;
import org.dataone.service.types.v1.Subject;
import org.dataone.service.types.v1.SystemMetadata;
import org.dataone.service.types.v1.util.ChecksumUtil;
import org.dataone.service.util.DateTimeMarshaller;
import org.dspace.foresite.ResourceMap;
import org.ecoinformatics.datamanager.DataManager;
import org.ecoinformatics.datamanager.database.DatabaseConnectionPoolInterface;
import org.ecoinformatics.datamanager.parser.DataPackage;
import org.jibx.runtime.JiBXException;
import org.xml.sax.SAXException;

import edu.ucsb.nceas.metacat.AccessionNumber;
import edu.ucsb.nceas.metacat.AccessionNumberException;
import edu.ucsb.nceas.metacat.DBUtil;
import edu.ucsb.nceas.metacat.IdentifierManager;
import edu.ucsb.nceas.metacat.McdbDocNotFoundException;
import edu.ucsb.nceas.metacat.McdbException;
import edu.ucsb.nceas.metacat.MetaCatServlet;
import edu.ucsb.nceas.metacat.MetacatHandler;
import edu.ucsb.nceas.metacat.accesscontrol.AccessControlException;
import edu.ucsb.nceas.metacat.client.InsufficientKarmaException;
import edu.ucsb.nceas.metacat.dataone.hazelcast.HazelcastService;
import edu.ucsb.nceas.metacat.dataquery.MetacatDatabaseConnectionPoolFactory;
import edu.ucsb.nceas.metacat.properties.PropertyService;
import edu.ucsb.nceas.metacat.replication.ReplicationService;
import edu.ucsb.nceas.metacat.shared.AccessException;
import edu.ucsb.nceas.metacat.shared.HandlerException;
import edu.ucsb.nceas.metacat.util.DocumentUtil;
import edu.ucsb.nceas.utilities.ParseLSIDException;
import edu.ucsb.nceas.utilities.PropertyNotFoundException;

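/**
 * Generates DataONE SystemMetadata for objects that already exist in Metacat,
 * optionally downloading remote data entities described by an EML document and
 * generating an ORE resource map for the resulting data package.
 *
 * A minimal usage sketch (the docid shown is hypothetical):
 * <pre>
 *   SystemMetadata sm = SystemMetadataFactory.createSystemMetadata("mydoc.1.1", true, false);
 * </pre>
 */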
public class SystemMetadataFactory {

	private static final String resourceMapPrefix = "resourceMap_";
	private static Logger logMetacat = Logger.getLogger(SystemMetadataFactory.class);
	
	/**
	 * Creates a system metadata object for insertion into metacat
	 * 
	 * @param localId
	 *            The local document identifier
	 * @param includeORE
	 *            Whether to generate an ORE resource map for the data package
	 * @param downloadData
	 *            Whether remote data entities described by the EML should be
	 *            downloaded and stored locally
	 * 
	 * @return sysMeta The system metadata object created
	 * @throws SAXException 
	 * @throws HandlerException 
	 * @throws AccessControlException 
	 * @throws AccessException 
	 */
	public static SystemMetadata createSystemMetadata(String localId, boolean includeORE, boolean downloadData)
			throws McdbException, McdbDocNotFoundException, SQLException,
			IOException, AccessionNumberException, ClassNotFoundException,
			InsufficientKarmaException, ParseLSIDException,
			PropertyNotFoundException, BaseException, NoSuchAlgorithmException,
			JiBXException, AccessControlException, HandlerException, SAXException, AccessException {
		
		logMetacat.debug("SystemMetadataFactory.createSystemMetadata() called.");
		logMetacat.debug("provided localId: " + localId);

		// create system metadata for the document
		SystemMetadata sysMeta = new SystemMetadata();
		sysMeta.setSerialVersion(BigInteger.valueOf(1));
		AccessionNumber accNum = new AccessionNumber(localId, "NONE");
		int rev = Integer.valueOf(accNum.getRev());

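		// Metacat identifies objects locally by docid.rev (the localId) while
		// DataONE uses a guid; reuse the existing mapping if there is one,
		// otherwise create an identity mapping so the localId serves as the guid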
		String guid = null;
		try {
			// get the guid if it exists
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
		} catch (McdbDocNotFoundException dnfe) {
			// otherwise create the mapping
			logMetacat.debug("There was a problem getting the guid from "
							+ "the given localId (docid and revision). The error message was: "
							+ dnfe.getMessage());
			logMetacat.debug("No guid in the identifier table. Adding it for " + localId);
			IdentifierManager.getInstance().createMapping(localId, localId);
			logMetacat.debug("Mapping created for " + localId);
			logMetacat.debug("accessionNumber: " + accNum);
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
		}
		Identifier identifier = new Identifier();
		identifier.setValue(guid);

		// set the id
		sysMeta.setIdentifier(identifier);

		// get the data or metadata object
		InputStream inputStream;
		try {
			inputStream = MetacatHandler.read(localId);
		} catch (ParseLSIDException ple) {
			logMetacat.debug("There was a problem parsing the LSID from "
					+ localId + ". The error message was: " + ple.getMessage());
			throw ple;

		} catch (PropertyNotFoundException pnfe) {
			logMetacat.debug("There was a problem finding a property. "
					+ "The error message was: " + pnfe.getMessage());
			throw pnfe;

		} catch (McdbException me) {
			logMetacat.debug("There was a Metacat problem. "
					+ "The error message was: " + me.getMessage());
			throw me;

		} catch (SQLException sqle) {
			logMetacat.debug("There was a SQL problem. "
					+ "The error message was: " + sqle.getMessage());
			throw sqle;

		} catch (ClassNotFoundException cnfe) {
			logMetacat.debug("There was a problem finding a class. "
					+ "The error message was: " + cnfe.getMessage());
			throw cnfe;

		} catch (IOException ioe) {
			logMetacat.debug("There was an I/O exception. "
					+ "The error message was: " + ioe.getMessage());
			throw ioe;

		} // end try()

		// get additional docinfo
		Hashtable<String, String> docInfo = ReplicationService.getDocumentInfoMap(localId);
		// set the default object format
		String doctype = docInfo.get("doctype");
		ObjectFormatIdentifier fmtid = null;

		// set the object format, fall back to defaults
		try {
			fmtid = ObjectFormatCache.getInstance().getFormat(doctype).getFormatId();
		} catch (NotFound nfe) {

			try {
				// format is not registered, use default
				if (doctype.trim().equals("BIN")) {
					fmtid = ObjectFormatCache.getInstance().getFormat(
							"application/octet-stream").getFormatId();

				} else {
					fmtid = ObjectFormatCache.getInstance().getFormat(
							"text/plain").getFormatId();
				}

			} catch (NotFound nf) {
				logMetacat.error("There was a problem getting the default format "
								+ "from the ObjectFormatCache: "
								+ nf.getMessage());
				throw nf;
			}

		}

		sysMeta.setFormatId(fmtid);
		logMetacat.debug("The ObjectFormat for " + localId + " is " + fmtid.getValue());

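		// note: MetacatHandler.read() returns a fresh stream on each call;
		// the object is read twice below because computing the checksum and
		// counting the size each consume the entire stream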
		// create the checksum
		inputStream = MetacatHandler.read(localId);
		String algorithm = "MD5";
		Checksum checksum = ChecksumUtil.checksum(inputStream, algorithm);
		sysMeta.setChecksum(checksum);
		
		// set the size
		inputStream = MetacatHandler.read(localId);
		String sizeStr = String.valueOf(sizeOfStream(inputStream));
		sysMeta.setSize(new BigInteger(sizeStr));
		
		// submitter
		Subject submitter = new Subject();
		submitter.setValue(docInfo.get("user_updated"));
		sysMeta.setSubmitter(submitter);
		
		// rights holder
		Subject owner = new Subject();
		owner.setValue(docInfo.get("user_owner"));
		sysMeta.setRightsHolder(owner);

		// dates
		String createdDateString = docInfo.get("date_created");
		String updatedDateString = docInfo.get("date_updated");
		Date createdDate = DateTimeMarshaller.deserializeDateToUTC(createdDateString);
		Date updatedDate = DateTimeMarshaller.deserializeDateToUTC(updatedDateString);
		sysMeta.setDateUploaded(createdDate);
		sysMeta.setDateSysMetadataModified(updatedDate);
		
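		// walk the sorted revision list to build the DataONE version chain:
		// the highest revision below ours becomes 'obsoletes', and the first
		// revision above ours becomes 'obsoletedBy'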
		// set the revision history
		String docidWithoutRev = accNum.getDocid();
		Identifier obsoletedBy = null;
		Identifier obsoletes = null;
		Vector<Integer> revisions = DBUtil.getRevListFromRevisionTable(docidWithoutRev);
		// ensure this ordering since processing depends on it
		Collections.sort(revisions);
		for (int existingRev: revisions) {
			// use the docid+rev as the guid
			String existingPid = docidWithoutRev + "." + existingRev;
			if (existingRev < rev) {
				// it's the old docid, until it's not
				obsoletes = new Identifier();
				obsoletes.setValue(existingPid);
			}
			if (existingRev > rev) {
				// it's the newer docid
				obsoletedBy = new Identifier();
				obsoletedBy.setValue(existingPid);
				// only want the version just after it
				break;
			}
		}
		// set them on our object
		sysMeta.setObsoletedBy(obsoletedBy);
		sysMeta.setObsoletes(obsoletes);
		
		// update the system metadata for the object[s] we are revising
		if (obsoletedBy != null) {
			//SystemMetadata obsoletedBySysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(obsoletedBy);
			SystemMetadata obsoletedBySysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletedBy.getValue());
			if (obsoletedBySysMeta != null) {
				obsoletedBySysMeta.setObsoletes(identifier);
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletedBy, obsoletedBySysMeta);
			}
		}
		if (obsoletes != null) {
			//SystemMetadata obsoletesSysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(obsoletes);
			SystemMetadata obsoletesSysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletes.getValue());
			if (obsoletesSysMeta != null) {
				obsoletesSysMeta.setObsoletedBy(identifier);
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletes, obsoletesSysMeta);
			}
		}
		
		// look up the access control policy we have in metacat
		AccessPolicy accessPolicy = IdentifierManager.getInstance().getAccessPolicy(guid);
		sysMeta.setAccessPolicy(accessPolicy);
		
		// authoritative node
		NodeReference nr = new NodeReference();
		nr.setValue(PropertyService.getProperty("dataone.memberNodeId"));
		sysMeta.setOriginMemberNode(nr);
		sysMeta.setAuthoritativeMemberNode(nr);
		
		// further parse EML documents to get data object format,
		// describes and describedBy information
		String fmtidValue = fmtid.getValue();
		if (fmtidValue.equals("eml://ecoinformatics.org/eml-2.0.0")
				|| fmtidValue.equals("eml://ecoinformatics.org/eml-2.0.1")
				|| fmtidValue.equals("eml://ecoinformatics.org/eml-2.1.0")
				|| fmtidValue.equals("eml://ecoinformatics.org/eml-2.1.1")) {

			try {
				inputStream = MetacatHandler.read(localId);
				DatabaseConnectionPoolInterface connectionPool = 
					MetacatDatabaseConnectionPoolFactory.getDatabaseConnectionPoolInterface();
				DataManager dataManager = 
					DataManager.getInstance(connectionPool, connectionPool.getDBAdapterName());
				DataPackage dataPackage = dataManager.parseMetadata(inputStream);

				// iterate through the data objects in the EML doc and add sysmeta
				logMetacat.debug("In createSystemMetadata() the number of data "
								+ "entities is: "
								+ dataPackage.getEntityNumber());

				// for generating the ORE map
				Map<Identifier, List<Identifier>> idMap = new HashMap<Identifier, List<Identifier>>();
				List<Identifier> dataIds = new ArrayList<Identifier>();

				// iterate through data objects described by the EML
				if (dataPackage.getEntityList() != null) {
					for (int j = 0; j < dataPackage.getEntityList().length; j++) {

						String dataDocUrl = dataPackage.getEntityList()[j].getURL();
						String dataDocMimeType = dataPackage.getEntityList()[j].getDataFormat();
						// default to binary
						if (dataDocMimeType == null) {
							dataDocMimeType = ObjectFormatCache.getInstance()
									.getFormat("application/octet-stream")
									.getFormatId().getValue();
						}

						// process the data
						boolean remoteData = false;
						String dataDocLocalId = null;
						Identifier dataGuid = new Identifier();

						// handle ecogrid, or downloadable data
						String ecogridPrefix = "ecogrid://knb/";
						if (dataDocUrl.trim().startsWith(ecogridPrefix)) {
							dataDocLocalId = dataDocUrl.substring(dataDocUrl.indexOf(ecogridPrefix) + ecogridPrefix.length());
						} else {
							// should we try downloading the remote data?
							if (downloadData) {
								InputStream dataObject = null;
								try {
									// download the data from the URL
									URL dataURL = new URL(dataDocUrl);
									dataObject = dataURL.openStream();
									// TODO: check for valid content
								} catch (Exception e) {
									// error with the download
									logMetacat.warn("Error downloading remote data. " + e.getMessage());
								}

								if (dataObject != null) {
									// create the local version of it
									dataDocLocalId = DocumentUtil.generateDocumentId(1);
									IdentifierManager.getInstance().createMapping(dataDocLocalId, dataDocLocalId);
									dataGuid.setValue(dataDocLocalId);

									// save it locally
									Session session = new Session();
									session.setSubject(submitter);
									MockHttpServletRequest request = new MockHttpServletRequest(null, null, null);
									MNodeService.getInstance(request).insertDataObject(dataObject, dataGuid, session);

									remoteData = true;
								}
							}

						}

						logMetacat.debug("Data local ID: " + dataDocLocalId);
						logMetacat.debug("Data URL     : " + dataDocUrl);
						logMetacat.debug("Data mime    : " + dataDocMimeType);

						// now we have a local id for the data
						if (dataDocLocalId != null) {

							// look up the guid for the data
							String dataDocid = DocumentUtil.getSmartDocId(dataDocLocalId);
							int dataRev = DocumentUtil.getRevisionFromAccessionNumber(dataDocLocalId);

							// check if data system metadata exists already
							SystemMetadata dataSysMeta = null;
							String dataGuidString = null;
							try {
								// look for the identifier
								dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);
								// set it
								dataGuid.setValue(dataGuidString);
								// look up the system metadata
								try {
									dataSysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(dataGuid);
								} catch (Exception e) {
									// probably not in the system
									dataSysMeta = null;
								}
								//dataSysMeta = IdentifierManager.getInstance().getSystemMetadata(dataGuidString);
							} catch (McdbDocNotFoundException nf) {
								// we didn't find it
								dataSysMeta = null;
							}

							// we'll have to generate it
							if (dataSysMeta == null) {
								// System metadata for data doesn't exist yet, so create it
								logMetacat.debug("There was not an existing system metadata document for " + dataDocLocalId);
								try {
									logMetacat.debug("Creating a system metadata document for " + dataDocLocalId);
									dataSysMeta = createSystemMetadata(dataDocLocalId, includeORE, false);

									// now look it up again
									dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);

									// set the guid
									dataGuid.setValue(dataGuidString);

									// set object format
									logMetacat.debug("Updating system metadata for "
													+ dataGuid.getValue() + " to "
													+ dataDocMimeType);
									try {
										ObjectFormatIdentifier fmt = 
											ObjectFormatCache.getInstance().getFormat(dataDocMimeType).getFormatId();
										dataSysMeta.setFormatId(fmt);
									} catch (NotFound nfe) {
										logMetacat.debug("Couldn't find format identifier for: "
														+ dataDocMimeType
														+ ". Setting it to application/octet-stream.");
										ObjectFormatIdentifier newFmtid = new ObjectFormatIdentifier();
										newFmtid.setValue("application/octet-stream");
										// apply the fallback format
										dataSysMeta.setFormatId(newFmtid);
									}

									// inherit access rules from metadata, if we don't have our own
									if (remoteData) {
										dataSysMeta.setAccessPolicy(sysMeta.getAccessPolicy());
										// TODO: use access rules defined in EML, per data file
									}

									// update the values
									HazelcastService.getInstance().getSystemMetadataMap().put(dataSysMeta.getIdentifier(), dataSysMeta);

								} catch (McdbDocNotFoundException mdnf) {
									mdnf.printStackTrace();
									throw mdnf;
								} catch (NumberFormatException nfe) {
									nfe.printStackTrace();
									throw nfe;
								} catch (AccessionNumberException ane) {
									ane.printStackTrace();
									throw ane;
								} catch (SQLException sqle) {
									sqle.printStackTrace();
									throw sqle;
								} catch (NoSuchAlgorithmException nsae) {
									nsae.printStackTrace();
									throw nsae;
								} catch (IOException ioe) {
									ioe.printStackTrace();
									throw ioe;
								} catch (PropertyNotFoundException pnfe) {
									pnfe.printStackTrace();
									throw pnfe;
								} catch (BaseException be) {
									be.printStackTrace();
									throw be;
								}

							}

							// part of the ORE package
							dataIds.add(dataGuid);

						} // end if (dataDocLocalId != null)

					} // end for (data entities)

				} // data entities not null

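				// an ORE resource map describes the package as a whole, tying
				// the science metadata to its data objects; it gets its own
				// identifier, system metadata, and revision chain that
				// parallel the EML document's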
				// ORE map
				if (includeORE) {
					// can we generate them?
					if (!dataIds.isEmpty()) {
						// it doesn't exist in the system?
						if (!oreExistsFor(sysMeta.getIdentifier())) {

							// generate the ORE map for this datapackage
							Identifier resourceMapId = new Identifier();
							// want to be able to run this over and over again for now
							resourceMapId.setValue(resourceMapPrefix + sysMeta.getIdentifier().getValue());
							idMap.put(sysMeta.getIdentifier(), dataIds);
							ResourceMap rm = ResourceMapFactory.getInstance().createResourceMap(resourceMapId, idMap);
							String resourceMapXML = ResourceMapFactory.getInstance().serializeResourceMap(rm);
							// copy most of the same system metadata as the packaging metadata
							SystemMetadata resourceMapSysMeta = new SystemMetadata();
							BeanUtils.copyProperties(resourceMapSysMeta, sysMeta);
							resourceMapSysMeta.setIdentifier(resourceMapId);
							Checksum oreChecksum = ChecksumUtil.checksum(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), "MD5");
							resourceMapSysMeta.setChecksum(oreChecksum);
							ObjectFormatIdentifier formatId = ObjectFormatCache.getInstance().getFormat("http://www.openarchives.org/ore/terms").getFormatId();
							resourceMapSysMeta.setFormatId(formatId);
							resourceMapSysMeta.setSize(BigInteger.valueOf(sizeOfStream(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING))));

							// set the revision graph
							resourceMapSysMeta.setObsoletes(null);
							resourceMapSysMeta.setObsoletedBy(null);
							// look up the resource map that this one obsoletes
							if (sysMeta.getObsoletes() != null) {
								Identifier resourceMapObsoletes = new Identifier();
								resourceMapObsoletes.setValue(resourceMapPrefix + sysMeta.getObsoletes().getValue());
								resourceMapSysMeta.setObsoletes(resourceMapObsoletes);
								SystemMetadata resourceMapObsoletesSystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletes);
								if (resourceMapObsoletesSystemMetadata != null) {
									resourceMapObsoletesSystemMetadata.setObsoletedBy(resourceMapId);
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletes, resourceMapObsoletesSystemMetadata);
								}
							}
							// look up the resource map that this one is obsoletedBy
							if (sysMeta.getObsoletedBy() != null) {
								Identifier resourceMapObsoletedBy = new Identifier();
								resourceMapObsoletedBy.setValue(resourceMapPrefix + sysMeta.getObsoletedBy().getValue());
								resourceMapSysMeta.setObsoletedBy(resourceMapObsoletedBy);
								SystemMetadata resourceMapObsoletedBySystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletedBy);
								if (resourceMapObsoletedBySystemMetadata != null) {
									resourceMapObsoletedBySystemMetadata.setObsoletes(resourceMapId);
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletedBy, resourceMapObsoletedBySystemMetadata);
								}
							}

							// save it locally
							Session session = new Session();
							session.setSubject(submitter);
							MockHttpServletRequest request = new MockHttpServletRequest(null, null, null);
							MNodeService.getInstance(request).insertDataObject(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), resourceMapId, session);
							MNodeService.getInstance(request).insertSystemMetadata(resourceMapSysMeta);
						}
					}
				}

			} catch (ParserConfigurationException pce) {
				logMetacat.debug("There was a problem parsing the EML document. "
								+ "The error message was: " + pce.getMessage());

			} catch (SAXException saxe) {
				logMetacat.debug("There was a problem traversing the EML document. "
								+ "The error message was: " + saxe.getMessage());

			} catch (XPathExpressionException xpee) {
				logMetacat.debug("There was a problem searching the EML document. "
								+ "The error message was: " + xpee.getMessage());
			} catch (Exception e) {
				logMetacat.debug("There was a problem creating System Metadata. "
								+ "The error message was: " + e.getMessage());
				e.printStackTrace();
			} // end try()

		} // end if()

		return sysMeta;
	}

	/**
	 * Determines if we already have registered an ORE map for this package
	 * @param guid the identifier of the EML/packaging object
	 * @return true if there is an ORE map for the given package
	 */
	private static boolean oreExistsFor(Identifier guid) {
		// TODO: implement call to CN.search()
		return false;
	}

	/**
	 * Find the size (in bytes) of a stream. Note: This needs to be refactored
	 * out of MetacatHandler and into a utility when stream i/o in Metacat is
	 * evaluated. The stream is fully consumed and is not closed by this method.
	 * 
	 * @param is The InputStream of bytes
	 * 
	 * @return size The size in bytes of the input stream as a long
	 * 
	 * @throws IOException
	 */
	private static long sizeOfStream(InputStream is) throws IOException {

		long size = 0;
		byte[] b = new byte[1024];
		int numread = is.read(b, 0, 1024);
		while (numread != -1) {
			size += numread;
			numread = is.read(b, 0, 1024);
		}
		return size;
	}
}