/**
 *  '$RCSfile$'
 *    Purpose: A Class for generating DataONE system metadata for objects
 *             stored in Metacat
 *  Copyright: 2000 Regents of the University of California and the
 *             National Center for Ecological Analysis and Synthesis
 *    Authors: Saurabh Garg
 *
 *   '$Author: leinfelder $'
 *     '$Date: 2012-01-13 14:01:37 -0800 (Fri, 13 Jan 2012) $'
 * '$Revision: 6907 $'
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
package edu.ucsb.nceas.metacat.dataone;

import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.net.URL;
import java.net.URLConnection;
import java.security.NoSuchAlgorithmException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.apache.wicket.protocol.http.MockHttpServletRequest;
import org.dataone.client.ObjectFormatCache;
import org.dataone.ore.ResourceMapFactory;
import org.dataone.service.exceptions.BaseException;
import org.dataone.service.exceptions.NotFound;
import org.dataone.service.types.v1.AccessPolicy;
import org.dataone.service.types.v1.Checksum;
import org.dataone.service.types.v1.Identifier;
import org.dataone.service.types.v1.NodeReference;
import org.dataone.service.types.v1.ObjectFormatIdentifier;
import org.dataone.service.types.v1.Session;
import org.dataone.service.types.v1.Subject;
import org.dataone.service.types.v1.SystemMetadata;
import org.dataone.service.types.v1.util.ChecksumUtil;
import org.dataone.service.util.DateTimeMarshaller;
import org.dspace.foresite.ResourceMap;
import org.ecoinformatics.datamanager.DataManager;
import org.ecoinformatics.datamanager.database.DatabaseConnectionPoolInterface;
import org.ecoinformatics.datamanager.parser.DataPackage;
import org.jibx.runtime.JiBXException;
import org.xml.sax.SAXException;

import edu.ucsb.nceas.metacat.AccessionNumber;
import edu.ucsb.nceas.metacat.AccessionNumberException;
import edu.ucsb.nceas.metacat.DBUtil;
import edu.ucsb.nceas.metacat.IdentifierManager;
import edu.ucsb.nceas.metacat.McdbDocNotFoundException;
import edu.ucsb.nceas.metacat.McdbException;
import edu.ucsb.nceas.metacat.MetaCatServlet;
import edu.ucsb.nceas.metacat.MetacatHandler;
import edu.ucsb.nceas.metacat.accesscontrol.AccessControlException;
import edu.ucsb.nceas.metacat.client.InsufficientKarmaException;
import edu.ucsb.nceas.metacat.dataone.hazelcast.HazelcastService;
import edu.ucsb.nceas.metacat.dataquery.MetacatDatabaseConnectionPoolFactory;
import edu.ucsb.nceas.metacat.properties.PropertyService;
import edu.ucsb.nceas.metacat.replication.ReplicationService;
import edu.ucsb.nceas.metacat.shared.AccessException;
import edu.ucsb.nceas.metacat.shared.HandlerException;
import edu.ucsb.nceas.metacat.util.DocumentUtil;
import edu.ucsb.nceas.utilities.ParseLSIDException;
import edu.ucsb.nceas.utilities.PropertyNotFoundException;

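/**
 * Factory for generating DataONE SystemMetadata for objects that already exist
 * in Metacat. For EML documents it can also create system metadata for the
 * described data entities and, optionally, an ORE resource map for the package.
 */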
public class SystemMetadataFactory {

	private static final String resourceMapPrefix = "resourceMap_";
	private static Logger logMetacat = Logger.getLogger(SystemMetadataFactory.class);

	/**
	 * Creates a system metadata object for insertion into metacat
	 * 
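	 * <p>A minimal usage sketch; the docid "sample.2.1" is hypothetical and
	 * must identify an object that already exists in this Metacat instance:</p>
	 * <pre>
	 * SystemMetadata sm = SystemMetadataFactory.createSystemMetadata("sample.2.1", true, false);
	 * String pid = sm.getIdentifier().getValue();
	 * </pre>
	 * 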
	 * @param localId
	 *            The local document identifier
	 * @param includeORE
	 *            Whether to generate an ORE resource map for the package
	 * @param downloadData
	 *            Whether to download and store remote data entities locally
	 * 
	 * @return sysMeta The system metadata object created
	 * @throws SAXException
	 * @throws HandlerException
	 * @throws AccessControlException
	 * @throws AccessException
	 */
	public static SystemMetadata createSystemMetadata(String localId, boolean includeORE, boolean downloadData)
			throws McdbException, McdbDocNotFoundException, SQLException,
			IOException, AccessionNumberException, ClassNotFoundException,
			InsufficientKarmaException, ParseLSIDException,
			PropertyNotFoundException, BaseException, NoSuchAlgorithmException,
			JiBXException, AccessControlException, HandlerException, SAXException, AccessException {

		logMetacat.debug("SystemMetadataFactory.createSystemMetadata() called.");
		logMetacat.debug("provided localId: " + localId);

		// create system metadata for the document
		SystemMetadata sysMeta = new SystemMetadata();
		sysMeta.setSerialVersion(BigInteger.valueOf(1));
		AccessionNumber accNum = new AccessionNumber(localId, "NONE");
		int rev = Integer.valueOf(accNum.getRev());

		String guid = null;
		try {
			// get the guid if it exists
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
		} catch (McdbDocNotFoundException dnfe) {
			// otherwise create the mapping
			logMetacat.debug("There was a problem getting the guid from "
							+ "the given localId (docid and revision). The error message was: "
							+ dnfe.getMessage());
			logMetacat.debug("No guid in the identifier table. Adding it for " + localId);
			IdentifierManager.getInstance().createMapping(localId, localId);
			logMetacat.debug("Mapping created for " + localId);
			logMetacat.debug("accessionNumber: " + accNum);
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
		}
		Identifier identifier = new Identifier();
		identifier.setValue(guid);

		// set the id
		sysMeta.setIdentifier(identifier);
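		// e.g. a localId of "sample.2.3" parses to docid "sample.2" and rev 3;
		// when no mapping exists yet, the guid defaults to the localId itself
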
		// get the data or metadata object
		InputStream inputStream;
		try {
			inputStream = MetacatHandler.read(localId);
		} catch (ParseLSIDException ple) {
			logMetacat.debug("There was a problem parsing the LSID from "
					+ localId + ". The error message was: " + ple.getMessage());
			throw ple;

		} catch (PropertyNotFoundException pnfe) {
			logMetacat.debug("There was a problem finding a property. "
					+ "The error message was: " + pnfe.getMessage());
			throw pnfe;

		} catch (McdbException me) {
			logMetacat.debug("There was a Metacat problem. "
					+ "The error message was: " + me.getMessage());
			throw me;

		} catch (SQLException sqle) {
			logMetacat.debug("There was a SQL problem. "
					+ "The error message was: " + sqle.getMessage());
			throw sqle;

		} catch (ClassNotFoundException cnfe) {
			logMetacat.debug("There was a problem finding a class. "
					+ "The error message was: " + cnfe.getMessage());
			throw cnfe;

		} catch (IOException ioe) {
			logMetacat.debug("There was an I/O exception. "
					+ "The error message was: " + ioe.getMessage());
			throw ioe;

		} // end try()

		// get additional docinfo
		Hashtable<String, String> docInfo = ReplicationService.getDocumentInfoMap(localId);
		// set the default object format
		String doctype = docInfo.get("doctype");
		ObjectFormatIdentifier fmtid = null;

		// set the object format, fall back to defaults
		try {
			fmtid = ObjectFormatCache.getInstance().getFormat(doctype).getFormatId();
		} catch (NotFound nfe) {

			try {
				// format is not registered, use default
				if (doctype.trim().equals("BIN")) {
					fmtid = ObjectFormatCache.getInstance().getFormat(
							"application/octet-stream").getFormatId();

				} else {
					fmtid = ObjectFormatCache.getInstance().getFormat(
							"text/plain").getFormatId();
				}

			} catch (NotFound nf) {
				logMetacat.error("There was a problem getting the default format "
								+ "from the ObjectFormatCache: "
								+ nf.getMessage());
				throw nf;
			}

		}

		sysMeta.setFormatId(fmtid);
		logMetacat.debug("The ObjectFormat for " + localId + " is " + fmtid.getValue());

221
		inputStream = MetacatHandler.read(localId);
222
		String algorithm = "MD5";
223
		Checksum checksum = ChecksumUtil.checksum(inputStream, algorithm);
224
		sysMeta.setChecksum(checksum);
225
		
226
		// set the size
227
		inputStream = MetacatHandler.read(localId);
228
		String sizeStr = new Long(sizeOfStream(inputStream)).toString();
229
		sysMeta.setSize(new BigInteger(sizeStr));
230
		
231
		// submitter
232
		Subject submitter = new Subject();
233
		submitter.setValue(docInfo.get("user_updated"));
234
		sysMeta.setSubmitter(submitter);
235
		
236
		// rights holder
237
		Subject owner = new Subject();
238
		owner.setValue(docInfo.get("user_owner"));
239
		sysMeta.setRightsHolder(owner);
240

    
241
		// dates
242
		String createdDateString = docInfo.get("date_created");
243
		String updatedDateString = docInfo.get("date_updated");
244
		Date createdDate = DateTimeMarshaller.deserializeDateToUTC(createdDateString);
245
		Date updatedDate = DateTimeMarshaller.deserializeDateToUTC(updatedDateString);  
246
		sysMeta.setDateUploaded(createdDate);
247
		sysMeta.setDateSysMetadataModified(updatedDate);
248
		
249
		// set the revision history
250
		String docidWithoutRev = accNum.getDocid();
251
		Identifier obsoletedBy = null;
252
		Identifier obsoletes = null;
253
		Vector<Integer> revisions = DBUtil.getRevListFromRevisionTable(docidWithoutRev);
254
		// ensure this ordering since processing depends on it
255
		Collections.sort(revisions);
256
		for (int existingRev: revisions) {
257
			// use the docid+rev as the guid
258
			String existingPid = docidWithoutRev + "." + existingRev;
259
			if (existingRev < rev) {
260
				// it's the old docid, until it's not
261
				obsoletes = new Identifier();
262
				obsoletes.setValue(existingPid);
263
			}
264
			if (existingRev > rev) {
265
				// it's the newer docid
266
				obsoletedBy = new Identifier();
267
				obsoletedBy.setValue(existingPid);
268
				// only want the version just after it
269
				break;
270
			}
271
		}
272
		// set them on our object
273
		sysMeta.setObsoletedBy(obsoletedBy);
274
		sysMeta.setObsoletes(obsoletes);
275
		
276
		// update the system metadata for the object[s] we are revising
277
		if (obsoletedBy != null) {
278
			//SystemMetadata obsoletedBySysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(obsoletedBy);
279
			SystemMetadata obsoletedBySysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletedBy.getValue());
280
			if (obsoletedBySysMeta != null) {
281
				obsoletedBySysMeta.setObsoletes(identifier);
282
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletedBy, obsoletedBySysMeta);
283
			}
284
		}
285
		if (obsoletes != null) {
286
			//SystemMetadata obsoletesSysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(obsoletes);
287
			SystemMetadata obsoletesSysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletes.getValue());
288
			if (obsoletesSysMeta != null) {
289
				obsoletesSysMeta.setObsoletedBy(identifier);
290
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletes, obsoletesSysMeta);
291
			}
292
		}
293
		
294
		// look up the access control policy we have in metacat
295
		AccessPolicy accessPolicy = IdentifierManager.getInstance().getAccessPolicy(guid);
296
		sysMeta.setAccessPolicy(accessPolicy);
297
		
298
		// authoritative node
299
		NodeReference nr = new NodeReference();
300
		nr.setValue(PropertyService.getProperty("dataone.memberNodeId"));
301
		sysMeta.setOriginMemberNode(nr);
302
		sysMeta.setAuthoritativeMemberNode(nr);
303
		
304
		// further parse EML documents to get data object format,
305
		// describes and describedBy information
306
		if (fmtid == ObjectFormatCache.getInstance().getFormat(
307
				"eml://ecoinformatics.org/eml-2.0.0").getFormatId()
308
				|| fmtid == ObjectFormatCache.getInstance().getFormat(
309
						"eml://ecoinformatics.org/eml-2.0.1").getFormatId()
310
				|| fmtid == ObjectFormatCache.getInstance().getFormat(
311
						"eml://ecoinformatics.org/eml-2.1.0").getFormatId()
312
				|| fmtid == ObjectFormatCache.getInstance().getFormat(
313
						"eml://ecoinformatics.org/eml-2.1.1").getFormatId()) {
314

    
315
			try {
316
				inputStream = MetacatHandler.read(localId);
317
				DatabaseConnectionPoolInterface connectionPool = 
318
					MetacatDatabaseConnectionPoolFactory.getDatabaseConnectionPoolInterface();
319
				DataManager dataManager = 
320
					DataManager.getInstance(connectionPool, connectionPool.getDBAdapterName());
321
				DataPackage dataPackage = dataManager.parseMetadata(inputStream);
322

    
323
				// iterate through the data objects in the EML doc and add sysmeta
324
				logMetacat.debug("In createSystemMetadata() the number of data "
325
								+ "entities is: "
326
								+ dataPackage.getEntityNumber());
327

    
328
				// for generating the ORE map
329
	            Map<Identifier, List<Identifier>> idMap = new HashMap<Identifier, List<Identifier>>();
330
	            List<Identifier> dataIds = new ArrayList<Identifier>();
331
				
332
				// iterate through data objects described by the EML
333
	            if (dataPackage.getEntityList() != null) {
334
					for (int j = 0; j < dataPackage.getEntityList().length; j++) {
335
	
336
						String dataDocUrl = dataPackage.getEntityList()[j].getURL();
337
						String dataDocMimeType = dataPackage.getEntityList()[j].getDataFormat();
338
						// default to binary
339
						if (dataDocMimeType == null) {
340
							dataDocMimeType = ObjectFormatCache.getInstance()
341
									.getFormat("application/octet-stream")
342
									.getFormatId().getValue();
343
						}
344

    
345
						// process the data
346
						boolean remoteData = false;
347
						String dataDocLocalId = null;
348
						Identifier dataGuid = new Identifier();
349

    
350
						// handle ecogrid, or downloadable data
351
						String ecogridPrefix = "ecogrid://knb/";
352
						if (dataDocUrl.trim().startsWith(ecogridPrefix)) {
353
							dataDocLocalId = dataDocUrl.substring(dataDocUrl.indexOf(ecogridPrefix) + ecogridPrefix.length());
354
						} else {
355
							// should we try downloading the remote data?
356
							if (downloadData) {
357
								InputStream dataObject = null;
358
								try {
359
									// download the data from the URL
360
									URL dataURL = new URL(dataDocUrl);
361
									URLConnection dataConnection = dataURL.openConnection();
362
									
363
									// default is to download the data
364
									dataObject = dataConnection.getInputStream();
365

    
366
									String detectedContentType = dataConnection.getContentType();
367
									logMetacat.info("Detected content type: " + detectedContentType);
368

    
369
									if (detectedContentType != null) {
370
										// seems to be HTML from the remote location
371
										if (detectedContentType.contains("html")) {
372
											// if we are not expecting it, we skip it
373
											if (!dataDocMimeType.contains("html")) {
374
												// set to null so we don't download it
375
												dataObject = null;
376
												logMetacat.warn("Skipping remote resource, unexpected HTML content type at: " + dataDocUrl);
377
											}
378
										}
379
										
380
										// TODO: any other special processing (csv, images, etc)?
381
									} else {
382
										// if we don't know what it is, should we skip it?
383
										dataObject = null;
384
										logMetacat.warn("Skipping remote resource, unknown content type at: " + dataDocUrl);
385
									}
386
									
387
								} catch (Exception e) {
388
									// error with the download
389
									logMetacat.warn("Error downloading remote data. " + e.getMessage());
390
								}
391
								
392
								if (dataObject != null) {
393
									// create the local version of it
394
									dataDocLocalId = DocumentUtil.generateDocumentId(1);
395
									IdentifierManager.getInstance().createMapping(dataDocLocalId, dataDocLocalId);
396
									dataGuid.setValue(dataDocLocalId);
397
									
398
									// save it locally
399
									Session session = new Session();
400
									session.setSubject(submitter);
401
									MockHttpServletRequest request = new MockHttpServletRequest(null, null, null);
402
									MNodeService.getInstance(request).insertDataObject(dataObject, dataGuid, session);
403
									
404
									remoteData = true;
405
								}
406
							}
407
							
408
						}
409
						
410
						logMetacat.debug("Data local ID: " + dataDocLocalId);
411
						logMetacat.debug("Data URL     : " + dataDocUrl);
412
						logMetacat.debug("Data mime    : " + dataDocMimeType);
413
						
414
						// now we have a local id for the data
415
						if (dataDocLocalId != null) {
416
							
417
							// look up the guid for the data
418
							String dataDocid = DocumentUtil.getSmartDocId(dataDocLocalId);
419
							int dataRev = DocumentUtil.getRevisionFromAccessionNumber(dataDocLocalId);
420
	
421
							// check if data system metadata exists already
422
							SystemMetadata dataSysMeta = null;
423
							String dataGuidString = null;
424
							try {
425
								// look for the identifier
426
								dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);
427
								// set it
428
								dataGuid.setValue(dataGuidString);
429
								// look up the system metadata
430
								try {
431
									dataSysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(dataGuid);
432
								} catch (Exception e) {
433
									// probably not in the system
434
									dataSysMeta = null;
435
								}
436
								//dataSysMeta = IdentifierManager.getInstance().getSystemMetadata(dataGuidString);
437
							} catch (McdbDocNotFoundException nf) {
438
								// we didn't find it
439
								dataSysMeta = null;
440
							}
441
								
442
							// we'll have to generate it
							if (dataSysMeta == null) {
								// system metadata for the data doesn't exist yet, so create it
								logMetacat.debug("No existing system metadata document found for " + dataDocLocalId);
								try {
									logMetacat.debug("Creating a system metadata document for " + dataDocLocalId);
									dataSysMeta = createSystemMetadata(dataDocLocalId, includeORE, false);

									// now look it up again
									dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);

									// set the guid
									dataGuid.setValue(dataGuidString);

									// set object format
									logMetacat.debug("Updating system metadata for "
													+ dataGuid.getValue() + " to "
													+ dataDocMimeType);
									try {
										ObjectFormatIdentifier fmt = 
											ObjectFormatCache.getInstance().getFormat(dataDocMimeType).getFormatId();
										dataSysMeta.setFormatId(fmt);
									} catch (NotFound nfe) {
										logMetacat.debug("Couldn't find format identifier for: "
														+ dataDocMimeType
														+ ". Setting it to application/octet-stream.");
										ObjectFormatIdentifier newFmtid = new ObjectFormatIdentifier();
										newFmtid.setValue("application/octet-stream");
										// fall back to the generic binary format
										dataSysMeta.setFormatId(newFmtid);
									}

									// inherit access rules from metadata, if we don't have our own
									if (remoteData) {
										dataSysMeta.setAccessPolicy(sysMeta.getAccessPolicy());
										// TODO: use access rules defined in EML, per data file
									}

									// update the values
									HazelcastService.getInstance().getSystemMetadataMap().put(dataSysMeta.getIdentifier(), dataSysMeta);

								} catch (McdbDocNotFoundException mdnf) {
									mdnf.printStackTrace();
									throw mdnf;
								} catch (NumberFormatException nfe) {
									nfe.printStackTrace();
									throw nfe;
								} catch (AccessionNumberException ane) {
									ane.printStackTrace();
									throw ane;
								} catch (SQLException sqle) {
									sqle.printStackTrace();
									throw sqle;
								} catch (NoSuchAlgorithmException nsae) {
									nsae.printStackTrace();
									throw nsae;
								} catch (IOException ioe) {
									ioe.printStackTrace();
									throw ioe;
								} catch (PropertyNotFoundException pnfe) {
									pnfe.printStackTrace();
									throw pnfe;
								} catch (BaseException be) {
									be.printStackTrace();
									throw be;
								}

							}

							// part of the ORE package
							dataIds.add(dataGuid);

						} // end if (dataDocLocalId != null)

					} // end for (data entities)

				} // data entities not null

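				// dataIds now holds the pid of each data entity that resolved to
				// (or was downloaded and saved as) a local object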
				// ORE map
				if (includeORE) {
					// can we generate them?
					if (!dataIds.isEmpty()) {
						// it doesn't exist in the system?
						if (!oreExistsFor(sysMeta.getIdentifier())) {

							// generate the ORE map for this datapackage
							Identifier resourceMapId = new Identifier();
							// want to be able to run this over and over again for now
							resourceMapId.setValue(resourceMapPrefix + sysMeta.getIdentifier().getValue());
							idMap.put(sysMeta.getIdentifier(), dataIds);
							ResourceMap rm = ResourceMapFactory.getInstance().createResourceMap(resourceMapId, idMap);
							String resourceMapXML = ResourceMapFactory.getInstance().serializeResourceMap(rm);
							// copy most of the same system metadata as the packaging metadata
							SystemMetadata resourceMapSysMeta = new SystemMetadata();
							BeanUtils.copyProperties(resourceMapSysMeta, sysMeta);
							resourceMapSysMeta.setIdentifier(resourceMapId);
							Checksum oreChecksum = ChecksumUtil.checksum(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), "MD5");
							resourceMapSysMeta.setChecksum(oreChecksum);
							ObjectFormatIdentifier formatId = ObjectFormatCache.getInstance().getFormat("http://www.openarchives.org/ore/terms").getFormatId();
							resourceMapSysMeta.setFormatId(formatId);
							resourceMapSysMeta.setSize(BigInteger.valueOf(sizeOfStream(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING))));

							// set the revision graph
							resourceMapSysMeta.setObsoletes(null);
							resourceMapSysMeta.setObsoletedBy(null);
							// look up the resource map that this one obsoletes
							if (sysMeta.getObsoletes() != null) {
								Identifier resourceMapObsoletes = new Identifier();
								resourceMapObsoletes.setValue(resourceMapPrefix + sysMeta.getObsoletes().getValue());
								resourceMapSysMeta.setObsoletes(resourceMapObsoletes);
								SystemMetadata resourceMapObsoletesSystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletes);
								if (resourceMapObsoletesSystemMetadata != null) {
									resourceMapObsoletesSystemMetadata.setObsoletedBy(resourceMapId);
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletes, resourceMapObsoletesSystemMetadata);
								}
							}
							// look up the resource map that this one is obsoletedBy
							if (sysMeta.getObsoletedBy() != null) {
								Identifier resourceMapObsoletedBy = new Identifier();
								resourceMapObsoletedBy.setValue(resourceMapPrefix + sysMeta.getObsoletedBy().getValue());
								resourceMapSysMeta.setObsoletedBy(resourceMapObsoletedBy);
								SystemMetadata resourceMapObsoletedBySystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletedBy);
								if (resourceMapObsoletedBySystemMetadata != null) {
									resourceMapObsoletedBySystemMetadata.setObsoletes(resourceMapId);
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletedBy, resourceMapObsoletedBySystemMetadata);
								}
							}

							// save it locally, if it doesn't already exist
							if (!IdentifierManager.getInstance().identifierExists(resourceMapId.getValue())) {
								Session session = new Session();
								session.setSubject(submitter);
								MockHttpServletRequest request = new MockHttpServletRequest(null, null, null);
								MNodeService.getInstance(request).insertDataObject(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), resourceMapId, session);
								MNodeService.getInstance(request).insertSystemMetadata(resourceMapSysMeta);
								logMetacat.info("Inserted ORE package: " + resourceMapId.getValue());
							}
						}
					}
				}

			} catch (ParserConfigurationException pce) {
				logMetacat.debug("There was a problem parsing the EML document. "
								+ "The error message was: " + pce.getMessage());

			} catch (SAXException saxe) {
				logMetacat.debug("There was a problem traversing the EML document. "
								+ "The error message was: " + saxe.getMessage());

			} catch (XPathExpressionException xpee) {
				logMetacat.debug("There was a problem searching the EML document. "
								+ "The error message was: " + xpee.getMessage());
			} catch (Exception e) {
				logMetacat.debug("There was a problem creating System Metadata. "
								+ "The error message was: " + e.getMessage());
				e.printStackTrace();
			} // end try()

		} // end if (EML format)

		return sysMeta;
	}

	/**
	 * Determines if we already have registered an ORE map for this package
	 * @param guid of the EML/packaging object
	 * @return true if there is an ORE map for the given package; the current
	 *         implementation always returns false, pending CN search support
	 */
	private static boolean oreExistsFor(Identifier guid) {
		// TODO: implement call to CN.search()
		return false;
	}

	/**
	 * Find the size (in bytes) of a stream. Note: This needs to be refactored
	 * out of MetacatHandler and into a utility when stream i/o in Metacat is
	 * evaluated. The stream is read to EOF (consumed) but not closed.
	 * 
	 * @param is The InputStream of bytes
	 * 
	 * @return size The size in bytes of the input stream as a long
	 * 
	 * @throws IOException
	 */
	private static long sizeOfStream(InputStream is) throws IOException {

		long size = 0;
		byte[] b = new byte[1024];
		int numread = is.read(b, 0, 1024);
		while (numread != -1) {
			size += numread;
			numread = is.read(b, 0, 1024);
		}
		return size;

	}
}