/**
 *  '$RCSfile$'
 *    Purpose: A Class for upgrading the database to version 1.5
 *  Copyright: 2000 Regents of the University of California and the
 *             National Center for Ecological Analysis and Synthesis
 *    Authors: Saurabh Garg
 *
 *   '$Author: leinfelder $'
 *     '$Date: 2011-12-20 11:25:00 -0800 (Tue, 20 Dec 2011) $'
 * '$Revision: 6808 $'
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
26
package edu.ucsb.nceas.metacat.dataone;
27

    
28
import java.io.BufferedInputStream;
29
import java.io.IOException;
30
import java.io.InputStream;
31
import java.math.BigInteger;
32
import java.security.NoSuchAlgorithmException;
33
import java.sql.SQLException;
34
import java.util.ArrayList;
35
import java.util.Collections;
36
import java.util.Date;
37
import java.util.HashMap;
38
import java.util.Hashtable;
39
import java.util.List;
40
import java.util.Map;
41
import java.util.Vector;
42

    
43
import javax.xml.parsers.ParserConfigurationException;
44
import javax.xml.xpath.XPathExpressionException;
45

    
46
import org.apache.commons.beanutils.BeanUtils;
47
import org.apache.commons.io.IOUtils;
48
import org.apache.log4j.Logger;
49
import org.apache.wicket.protocol.http.MockHttpServletRequest;
50
import org.dataone.client.ObjectFormatCache;
51
import org.dataone.ore.ResourceMapFactory;
52
import org.dataone.service.exceptions.BaseException;
53
import org.dataone.service.exceptions.NotFound;
54
import org.dataone.service.types.v1.AccessPolicy;
55
import org.dataone.service.types.v1.Checksum;
56
import org.dataone.service.types.v1.Identifier;
57
import org.dataone.service.types.v1.NodeReference;
58
import org.dataone.service.types.v1.ObjectFormatIdentifier;
59
import org.dataone.service.types.v1.Session;
60
import org.dataone.service.types.v1.Subject;
61
import org.dataone.service.types.v1.SystemMetadata;
62
import org.dataone.service.types.v1.util.ChecksumUtil;
63
import org.dataone.service.util.DateTimeMarshaller;
64
import org.dspace.foresite.ResourceMap;
65
import org.ecoinformatics.datamanager.DataManager;
66
import org.ecoinformatics.datamanager.database.DatabaseConnectionPoolInterface;
67
import org.ecoinformatics.datamanager.parser.DataPackage;
68
import org.jibx.runtime.JiBXException;
69
import org.xml.sax.SAXException;
70

    
71
import edu.ucsb.nceas.metacat.AccessionNumber;
72
import edu.ucsb.nceas.metacat.AccessionNumberException;
73
import edu.ucsb.nceas.metacat.DBUtil;
74
import edu.ucsb.nceas.metacat.IdentifierManager;
75
import edu.ucsb.nceas.metacat.McdbDocNotFoundException;
76
import edu.ucsb.nceas.metacat.McdbException;
77
import edu.ucsb.nceas.metacat.MetaCatServlet;
78
import edu.ucsb.nceas.metacat.MetacatHandler;
79
import edu.ucsb.nceas.metacat.accesscontrol.AccessControlException;
80
import edu.ucsb.nceas.metacat.client.InsufficientKarmaException;
81
import edu.ucsb.nceas.metacat.dataone.hazelcast.HazelcastService;
82
import edu.ucsb.nceas.metacat.dataquery.MetacatDatabaseConnectionPoolFactory;
83
import edu.ucsb.nceas.metacat.properties.PropertyService;
84
import edu.ucsb.nceas.metacat.replication.ReplicationService;
85
import edu.ucsb.nceas.metacat.shared.AccessException;
86
import edu.ucsb.nceas.metacat.shared.HandlerException;
87
import edu.ucsb.nceas.metacat.util.DocumentUtil;
88
import edu.ucsb.nceas.utilities.ParseLSIDException;
89
import edu.ucsb.nceas.utilities.PropertyNotFoundException;
90

    
91
public class SystemMetadataFactory {

	// Prefix prepended to a package PID to form the identifier of its ORE resource map
	private static final String resourceMapPrefix = "resourceMap_";
	// Class-level log4j logger
	private static Logger logMetacat = Logger.getLogger(SystemMetadataFactory.class);
	
96
	/**
97
	 * Creates a system metadata object for insertion into metacat
98
	 * 
99
	 * @param localId
100
	 *            The local document identifier
101
	 * @param user
102
	 *            The user submitting the system metadata document
103
	 * @param groups
104
	 *            The groups the user belongs to
105
	 * 
106
	 * @return sysMeta The system metadata object created
107
	 * @throws SAXException 
108
	 * @throws HandlerException 
109
	 * @throws AccessControlException 
110
	 * @throws AccessException 
111
	 */
112
	public static SystemMetadata createSystemMetadata(String localId, boolean includeORE)
113
			throws McdbException, McdbDocNotFoundException, SQLException,
114
			IOException, AccessionNumberException, ClassNotFoundException,
115
			InsufficientKarmaException, ParseLSIDException,
116
			PropertyNotFoundException, BaseException, NoSuchAlgorithmException,
117
			JiBXException, AccessControlException, HandlerException, SAXException, AccessException {
118
		
119
		logMetacat.debug("MetacatHandler.createSystemMetadata() called.");
120
		logMetacat.debug("provided localId: " + localId);
121

    
122
		// create system metadata for the document
123
		SystemMetadata sysMeta = new SystemMetadata();
124
		sysMeta.setSerialVersion(BigInteger.valueOf(1));
125
		AccessionNumber accNum = new AccessionNumber(localId, "NONE");
126
		int rev = Integer.valueOf(accNum.getRev());
127

    
128
		String guid = null;
129
		try {
130
			// get the guid if it exists
131
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
132
		} catch (McdbDocNotFoundException dnfe) {
133
			// otherwise create the mapping
134
			logMetacat.debug("There was a problem getting the guid from "
135
							+ "the given localId (docid and revision). The error message was: "
136
							+ dnfe.getMessage());
137
			logMetacat.debug("No guid in the identifier table.  adding it for " + localId);
138
			IdentifierManager.getInstance().createMapping(localId, localId);
139
			logMetacat.debug("Mapping created for " + localId);
140
			logMetacat.debug("accessionNumber: " + accNum);
141
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
142
		}
143
		Identifier identifier = new Identifier();
144
		identifier.setValue(guid);
145

    
146
		// set the id
147
		sysMeta.setIdentifier(identifier);
148

    
149
		// get the data or metadata object
150
		InputStream inputStream;
151
		try {
152
			inputStream = MetacatHandler.read(localId);
153
		} catch (ParseLSIDException ple) {
154
			logMetacat.debug("There was a problem parsing the LSID from "
155
					+ localId + ". The error message was: " + ple.getMessage());
156
			throw ple;
157

    
158
		} catch (PropertyNotFoundException pnfe) {
159
			logMetacat.debug("There was a problem finding a property. "
160
					+ "The error message was: " + pnfe.getMessage());
161
			throw pnfe;
162

    
163
		} catch (McdbException me) {
164
			logMetacat.debug("There was a Metacat problem. "
165
					+ "The error message was: " + me.getMessage());
166
			throw me;
167

    
168
		} catch (SQLException sqle) {
169
			logMetacat.debug("There was a SQL problem. "
170
					+ "The error message was: " + sqle.getMessage());
171
			throw sqle;
172

    
173
		} catch (ClassNotFoundException cnfe) {
174
			logMetacat.debug("There was a problem finding a class. "
175
					+ "The error message was: " + cnfe.getMessage());
176
			throw cnfe;
177

    
178
		} catch (IOException ioe) {
179
			logMetacat.debug("There was an I/O exception. "
180
					+ "The error message was: " + ioe.getMessage());
181
			throw ioe;
182

    
183
		} // end try()
184

    
185
		// get additional docinfo
186
		Hashtable<String, String> docInfo = ReplicationService.getDocumentInfoMap(localId);
187
		// set the default object format
188
		String doctype = docInfo.get("doctype");
189
		ObjectFormatIdentifier fmtid = null;
190

    
191
		// set the object format, fall back to defaults
192
		try {
193
			fmtid = ObjectFormatCache.getInstance().getFormat(doctype).getFormatId();
194
		} catch (NotFound nfe) {
195

    
196
			try {
197
				// format is not registered, use default
198
				if (doctype.trim().equals("BIN")) {
199
					fmtid = ObjectFormatCache.getInstance().getFormat(
200
							"application/octet-stream").getFormatId();
201

    
202
				} else {
203
					fmtid = ObjectFormatCache.getInstance().getFormat(
204
							"text/plain").getFormatId();
205
				}
206

    
207
			} catch (NotFound nf) {
208
				logMetacat.error("There was a problem getting the default format "
209
								+ "from the ObjectFormatCache: "
210
								+ nf.getMessage());
211
				throw nf;
212
			}
213

    
214
		}
215

    
216
		sysMeta.setFormatId(fmtid);
217
		logMetacat.debug("The ObjectFormat for " + localId + " is " + fmtid.getValue());
218

    
219
		// create the checksum
220
		inputStream = MetacatHandler.read(localId);
221
		String algorithm = "MD5";
222
		Checksum checksum = ChecksumUtil.checksum(inputStream, algorithm);
223
		sysMeta.setChecksum(checksum);
224
		
225
		// set the size
226
		inputStream = MetacatHandler.read(localId);
227
		String sizeStr = new Long(sizeOfStream(inputStream)).toString();
228
		sysMeta.setSize(new BigInteger(sizeStr));
229
		
230
		// submitter
231
		Subject submitter = new Subject();
232
		submitter.setValue(docInfo.get("user_updated"));
233
		sysMeta.setSubmitter(submitter);
234
		
235
		// rights holder
236
		Subject owner = new Subject();
237
		owner.setValue(docInfo.get("user_owner"));
238
		sysMeta.setRightsHolder(owner);
239

    
240
		// dates
241
		String createdDateString = docInfo.get("date_created");
242
		String updatedDateString = docInfo.get("date_updated");
243
		Date createdDate = DateTimeMarshaller.deserializeDateToUTC(createdDateString);
244
		Date updatedDate = DateTimeMarshaller.deserializeDateToUTC(updatedDateString);  
245
		sysMeta.setDateUploaded(createdDate);
246
		sysMeta.setDateSysMetadataModified(updatedDate);
247
		
248
		// set the revision history
249
		String docidWithoutRev = accNum.getDocid();
250
		Identifier obsoletedBy = null;
251
		Identifier obsoletes = null;
252
		Vector<Integer> revisions = DBUtil.getRevListFromRevisionTable(docidWithoutRev);
253
		// ensure this ordering since processing depends on it
254
		Collections.sort(revisions);
255
		for (int existingRev: revisions) {
256
			// use the docid+rev as the guid
257
			String existingPid = docidWithoutRev + "." + existingRev;
258
			if (existingRev < rev) {
259
				// it's the old docid, until it's not
260
				obsoletes = new Identifier();
261
				obsoletes.setValue(existingPid);
262
			}
263
			if (existingRev > rev) {
264
				// it's the newer docid
265
				obsoletedBy = new Identifier();
266
				obsoletedBy.setValue(existingPid);
267
				// only want the version just after it
268
				break;
269
			}
270
		}
271
		// set them on our object
272
		sysMeta.setObsoletedBy(obsoletedBy);
273
		sysMeta.setObsoletes(obsoletes);
274
		
275
		// update the system metadata for the object[s] we are revising
276
		if (obsoletedBy != null) {
277
			//SystemMetadata obsoletedBySysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(obsoletedBy);
278
			SystemMetadata obsoletedBySysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletedBy.getValue());
279
			if (obsoletedBySysMeta != null) {
280
				obsoletedBySysMeta.setObsoletes(identifier);
281
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletedBy, obsoletedBySysMeta);
282
			}
283
		}
284
		if (obsoletes != null) {
285
			//SystemMetadata obsoletesSysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(obsoletes);
286
			SystemMetadata obsoletesSysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletes.getValue());
287
			if (obsoletesSysMeta != null) {
288
				obsoletesSysMeta.setObsoletedBy(identifier);
289
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletes, obsoletesSysMeta);
290
			}
291
		}
292
		
293
		// look up the access control policy we have in metacat
294
		AccessPolicy accessPolicy = IdentifierManager.getInstance().getAccessPolicy(guid);
295
		sysMeta.setAccessPolicy(accessPolicy);
296
		
297
		// authoritative node
298
		NodeReference nr = new NodeReference();
299
		nr.setValue(PropertyService.getProperty("dataone.memberNodeId"));
300
		sysMeta.setOriginMemberNode(nr);
301
		sysMeta.setAuthoritativeMemberNode(nr);
302
		
303
		// further parse EML documents to get data object format,
304
		// describes and describedBy information
305
		if (fmtid == ObjectFormatCache.getInstance().getFormat(
306
				"eml://ecoinformatics.org/eml-2.0.0").getFormatId()
307
				|| fmtid == ObjectFormatCache.getInstance().getFormat(
308
						"eml://ecoinformatics.org/eml-2.0.1").getFormatId()
309
				|| fmtid == ObjectFormatCache.getInstance().getFormat(
310
						"eml://ecoinformatics.org/eml-2.1.0").getFormatId()
311
				|| fmtid == ObjectFormatCache.getInstance().getFormat(
312
						"eml://ecoinformatics.org/eml-2.1.1").getFormatId()) {
313

    
314
			try {
315
				inputStream = MetacatHandler.read(localId);
316
				DatabaseConnectionPoolInterface connectionPool = 
317
					MetacatDatabaseConnectionPoolFactory.getDatabaseConnectionPoolInterface();
318
				DataManager dataManager = 
319
					DataManager.getInstance(connectionPool, connectionPool.getDBAdapterName());
320
				DataPackage dataPackage = dataManager.parseMetadata(inputStream);
321

    
322
				// iterate through the data objects in the EML doc and add sysmeta
323
				logMetacat.debug("In createSystemMetadata() the number of data "
324
								+ "entities is: "
325
								+ dataPackage.getEntityNumber());
326

    
327
				// for generating the ORE map
328
	            Map<Identifier, List<Identifier>> idMap = new HashMap<Identifier, List<Identifier>>();
329
	            List<Identifier> dataIds = new ArrayList<Identifier>();
330
				
331
				// iterate through data objects described by the EML
332
	            if (dataPackage.getEntityList() != null) {
333
					for (int j = 0; j < dataPackage.getEntityList().length; j++) {
334
	
335
						String dataDocUrl = dataPackage.getEntityList()[j].getURL();
336
						String dataDocMimeType = dataPackage.getEntityList()[j].getDataFormat();
337
						// default to binary
338
						if (dataDocMimeType == null) {
339
							dataDocMimeType = ObjectFormatCache.getInstance()
340
									.getFormat("application/octet-stream")
341
									.getFormatId().getValue();
342
						}
343
						String dataDocLocalId = "";
344
						logMetacat.debug("Data local ID: " + dataDocLocalId);
345
						logMetacat.debug("Data URL     : " + dataDocUrl);
346
						logMetacat.debug("Data mime    : " + dataDocMimeType);
347
	
348
						// we only handle ecogrid urls right now
349
						String ecogridPrefix = "ecogrid://knb/";
350
						if (dataDocUrl.trim().startsWith(ecogridPrefix)) {
351
							dataDocLocalId = dataDocUrl.substring(dataDocUrl
352
									.indexOf(ecogridPrefix)
353
									+ ecogridPrefix.length());
354
	
355
							// look up the guid for the data
356
							String dataDocid = DocumentUtil.getSmartDocId(dataDocLocalId);
357
							int dataRev = DocumentUtil.getRevisionFromAccessionNumber(dataDocLocalId);
358
	
359
							// check if data system metadata exists already
360
							SystemMetadata dataSysMeta = null;
361
							String dataGuidString = null;
362
							Identifier dataGuid = new Identifier();
363
							try {
364
								// look for the identifier
365
								dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);
366
								// set it
367
								dataGuid.setValue(dataGuidString);
368
								// look up the system metadata
369
								try {
370
									dataSysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(dataGuid);
371
								} catch (Exception e) {
372
									// probably not in the system
373
									dataSysMeta = null;
374
								}
375
								//dataSysMeta = IdentifierManager.getInstance().getSystemMetadata(dataGuidString);
376
							} catch (McdbDocNotFoundException nf) {
377
								// we didn't find it
378
								dataSysMeta = null;
379
							}
380
								
381
							// we'll have to generate it	
382
							if (dataSysMeta == null) {
383
								// System metadata for data doesn't exist yet, so create it
384
								logMetacat.debug("There was not an existing system metadata document for " + dataDocLocalId);
385
								try {
386
									logMetacat.debug("Creating a system metadata " + "document for " + dataDocLocalId);
387
									dataSysMeta = createSystemMetadata(dataDocLocalId, includeORE);
388
	
389
									// now look it up again
390
									dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);
391
	
392
									// set the guid
393
									dataGuid.setValue(dataGuidString);
394
	
395
									// set object format
396
									logMetacat.debug("Updating system metadata for "
397
													+ dataGuid.getValue() + " to "
398
													+ dataDocMimeType);
399
									try {
400
										ObjectFormatIdentifier fmt = 
401
											ObjectFormatCache.getInstance().getFormat(dataDocMimeType).getFormatId();
402
										dataSysMeta.setFormatId(fmt);
403
									} catch (NotFound nfe) {
404
										logMetacat.debug("Couldn't find format identifier for: "
405
														+ dataDocMimeType
406
														+ ". Setting it to application/octet-stream.");
407
										ObjectFormatIdentifier newFmtid = new ObjectFormatIdentifier();
408
										newFmtid.setValue("application/octet-stream");
409
									}
410
	
411
									// update the values
412
									HazelcastService.getInstance().getSystemMetadataMap().put(dataSysMeta.getIdentifier(), dataSysMeta);
413
									
414
	
415
								} catch (McdbDocNotFoundException mdnf) {
416
									mdnf.printStackTrace();
417
									throw mdnf;
418
								} catch (NumberFormatException nfe) {
419
									nfe.printStackTrace();
420
									throw nfe;
421
								} catch (AccessionNumberException ane) {
422
									ane.printStackTrace();
423
									throw ane;
424
								} catch (SQLException sqle) {
425
									sqle.printStackTrace();
426
									throw sqle;
427
								} catch (NoSuchAlgorithmException nsae) {
428
									nsae.printStackTrace();
429
									throw nsae;
430
								} catch (IOException ioe) {
431
									ioe.printStackTrace();
432
									throw ioe;
433
								} catch (PropertyNotFoundException pnfe) {
434
									pnfe.printStackTrace();
435
									throw pnfe;
436
								} catch (BaseException be) {
437
									be.printStackTrace();
438
									throw be;
439
								}	
440
								
441
							}
442
							
443
							// part of the ORE package
444
							dataIds.add(dataGuid);
445
	
446
						} // end if (EML package)
447
	
448
					} // end for (data entities)
449
					
450
	            } // data entities not null
451
	            
452
				// ORE map
453
				if (includeORE) {
454
					// can we generate them?
455
			        if (!dataIds.isEmpty()) {
456
			        	// it doesn't exist in the system?
457
			        	if (!oreExistsFor(sysMeta.getIdentifier())) {
458
			        	
459
				            // generate the ORE map for this datapackage
460
				            Identifier resourceMapId = new Identifier();
461
				            // want to be able to run this over and over again for now
462
				            resourceMapId.setValue(resourceMapPrefix + sysMeta.getIdentifier().getValue());
463
				            idMap.put(sysMeta.getIdentifier(), dataIds);
464
				            ResourceMap rm = ResourceMapFactory.getInstance().createResourceMap(resourceMapId, idMap);
465
				            String resourceMapXML = ResourceMapFactory.getInstance().serializeResourceMap(rm);
466
				            // copy most of the same system metadata as the packaging metadata
467
				            SystemMetadata resourceMapSysMeta = new SystemMetadata();
468
				            BeanUtils.copyProperties(resourceMapSysMeta, sysMeta);
469
				            resourceMapSysMeta.setIdentifier(resourceMapId);
470
				            Checksum oreChecksum = ChecksumUtil.checksum(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), "MD5");
471
							resourceMapSysMeta.setChecksum(oreChecksum);
472
				            ObjectFormatIdentifier formatId = ObjectFormatCache.getInstance().getFormat("http://www.openarchives.org/ore/terms").getFormatId();
473
							resourceMapSysMeta.setFormatId(formatId);
474
							resourceMapSysMeta.setSize(BigInteger.valueOf(sizeOfStream(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING))));
475
							
476
							// set the revision graph
477
							resourceMapSysMeta.setObsoletes(null);
478
							resourceMapSysMeta.setObsoletedBy(null);
479
							// look up the resource map that this one obsoletes
480
							if (sysMeta.getObsoletes() != null) {
481
								Identifier resourceMapObsoletes = new Identifier();
482
								resourceMapObsoletes.setValue(resourceMapPrefix + sysMeta.getObsoletes().getValue());
483
								resourceMapSysMeta.setObsoletes(resourceMapObsoletes);
484
								SystemMetadata resourceMapObsoletesSystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletes);
485
								if (resourceMapObsoletesSystemMetadata != null) {
486
									resourceMapObsoletesSystemMetadata.setObsoletedBy(resourceMapId);
487
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletes, resourceMapObsoletesSystemMetadata);
488
								}
489
							}
490
							// look up the resource map that this one is obsoletedBy
491
							if (sysMeta.getObsoletedBy() != null) {
492
								Identifier resourceMapObsoletedBy = new Identifier();
493
								resourceMapObsoletedBy.setValue(resourceMapPrefix + sysMeta.getObsoletedBy().getValue());
494
								resourceMapSysMeta.setObsoletedBy(resourceMapObsoletedBy);
495
								SystemMetadata resourceMapObsoletedBySystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletedBy);
496
								if (resourceMapObsoletedBySystemMetadata != null) {
497
									resourceMapObsoletedBySystemMetadata.setObsoletes(resourceMapId);
498
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletedBy, resourceMapObsoletedBySystemMetadata);
499
								}
500
							}
501
				            
502
							// save it locally
503
							Session session = new Session();
504
							session.setSubject(submitter);
505
							MockHttpServletRequest request = new MockHttpServletRequest(null, null, null);
506
							MNodeService.getInstance(request).insertDataObject(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), resourceMapId, session);
507
							MNodeService.getInstance(request).insertSystemMetadata(resourceMapSysMeta);
508
			        	}
509
			        }
510
				}
511

    
512
			} catch (ParserConfigurationException pce) {
513
				logMetacat.debug("There was a problem parsing the EML document. "
514
								+ "The error message was: " + pce.getMessage());
515

    
516
			} catch (SAXException saxe) {
517
				logMetacat.debug("There was a problem traversing the EML document. "
518
								+ "The error message was: " + saxe.getMessage());
519

    
520
			} catch (XPathExpressionException xpee) {
521
				logMetacat.debug("There was a problem searching the EML document. "
522
								+ "The error message was: " + xpee.getMessage());
523
			} catch (Exception e) {
524
				logMetacat.debug("There was a problem creating System Metadata. "
525
								+ "The error message was: " + e.getMessage());
526
				e.printStackTrace();
527
			} // end try()
528

    
529
		} // end if()
530

    
531
		return sysMeta;
532
	}
533
	
534
	/**
	 * Determines if we already have registered an ORE map for this package
	 * @param guid of the EML/packaging object
	 * @return true if there is an ORE map for the given package
	 */
	private static boolean oreExistsFor(Identifier guid) {
		// TODO: implement call to CN.search()
		// Stub: always reports that no ORE map exists, so the caller will
		// (re)generate a resource map on every invocation.
		return false;
	}
543

    
544
	/**
545
	 * Find the size (in bytes) of a stream. Note: This needs to refactored out
546
	 * of MetacatHandler and into a utility when stream i/o in Metacat is
547
	 * evaluated.
548
	 * 
549
	 * @param is The InputStream of bytes
550
	 * 
551
	 * @return size The size in bytes of the input stream as a long
552
	 * 
553
	 * @throws IOException
554
	 */
555
	private static long sizeOfStream(InputStream is) throws IOException {
556

    
557
		long size = 0;
558
		byte[] b = new byte[1024];
559
		int numread = is.read(b, 0, 1024);
560
		while (numread != -1) {
561
			size += numread;
562
			numread = is.read(b, 0, 1024);
563
		}
564
		return size;
565

    
566
	}
567
}