/**
 *  '$RCSfile$'
 *    Purpose: A Class that creates DataONE system metadata for objects
 *             stored in Metacat
 *  Copyright: 2000 Regents of the University of California and the
 *             National Center for Ecological Analysis and Synthesis
 *    Authors: Saurabh Garg
 *
 *   '$Author: leinfelder $'
 *     '$Date: 2011-12-16 11:00:44 -0800 (Fri, 16 Dec 2011) $'
 * '$Revision: 6800 $'
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
package edu.ucsb.nceas.metacat.dataone;

import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.security.NoSuchAlgorithmException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.apache.wicket.protocol.http.MockHttpServletRequest;
import org.dataone.client.ObjectFormatCache;
import org.dataone.ore.ResourceMapFactory;
import org.dataone.service.exceptions.BaseException;
import org.dataone.service.exceptions.NotFound;
import org.dataone.service.types.v1.AccessPolicy;
import org.dataone.service.types.v1.Checksum;
import org.dataone.service.types.v1.Identifier;
import org.dataone.service.types.v1.NodeReference;
import org.dataone.service.types.v1.ObjectFormatIdentifier;
import org.dataone.service.types.v1.Session;
import org.dataone.service.types.v1.Subject;
import org.dataone.service.types.v1.SystemMetadata;
import org.dataone.service.types.v1.util.ChecksumUtil;
import org.dataone.service.util.DateTimeMarshaller;
import org.dspace.foresite.ResourceMap;
import org.ecoinformatics.datamanager.DataManager;
import org.ecoinformatics.datamanager.database.DatabaseConnectionPoolInterface;
import org.ecoinformatics.datamanager.parser.DataPackage;
import org.jibx.runtime.JiBXException;
import org.xml.sax.SAXException;

import edu.ucsb.nceas.metacat.AccessionNumber;
import edu.ucsb.nceas.metacat.AccessionNumberException;
import edu.ucsb.nceas.metacat.DBUtil;
import edu.ucsb.nceas.metacat.IdentifierManager;
import edu.ucsb.nceas.metacat.McdbDocNotFoundException;
import edu.ucsb.nceas.metacat.McdbException;
import edu.ucsb.nceas.metacat.MetaCatServlet;
import edu.ucsb.nceas.metacat.MetacatHandler;
import edu.ucsb.nceas.metacat.accesscontrol.AccessControlException;
import edu.ucsb.nceas.metacat.client.InsufficientKarmaException;
import edu.ucsb.nceas.metacat.dataone.hazelcast.HazelcastService;
import edu.ucsb.nceas.metacat.dataquery.MetacatDatabaseConnectionPoolFactory;
import edu.ucsb.nceas.metacat.properties.PropertyService;
import edu.ucsb.nceas.metacat.replication.ReplicationService;
import edu.ucsb.nceas.metacat.shared.AccessException;
import edu.ucsb.nceas.metacat.shared.HandlerException;
import edu.ucsb.nceas.metacat.util.DocumentUtil;
import edu.ucsb.nceas.utilities.ParseLSIDException;
import edu.ucsb.nceas.utilities.PropertyNotFoundException;

public class SystemMetadataFactory {

	private static final String resourceMapPrefix = "resourceMap_";
	private static Logger logMetacat = Logger.getLogger(SystemMetadataFactory.class);

	/**
	 * Creates a system metadata object for insertion into metacat
	 * 
	 * @param localId
	 *            The local document identifier
	 * @param includeORE
	 *            Whether an ORE resource map should be generated and
	 *            registered for the data package
	 * 
	 * @return sysMeta The system metadata object created
	 * @throws SAXException 
	 * @throws HandlerException 
	 * @throws AccessControlException 
	 * @throws AccessException 
	 */
	public static SystemMetadata createSystemMetadata(String localId, boolean includeORE)
			throws McdbException, McdbDocNotFoundException, SQLException,
			IOException, AccessionNumberException, ClassNotFoundException,
			InsufficientKarmaException, ParseLSIDException,
			PropertyNotFoundException, BaseException, NoSuchAlgorithmException,
			JiBXException, AccessControlException, HandlerException, SAXException, AccessException {

		logMetacat.debug("SystemMetadataFactory.createSystemMetadata() called.");
		logMetacat.debug("provided localId: " + localId);

		// create system metadata for the document
		SystemMetadata sysMeta = new SystemMetadata();
		sysMeta.setSerialVersion(BigInteger.valueOf(1));
		int rev = IdentifierManager.getInstance().getLatestRevForLocalId(localId);
		AccessionNumber accNum = new AccessionNumber(localId, "NONE");
		String guid = null;
		try {
			// get the guid if it exists
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
		} catch (McdbDocNotFoundException dnfe) {
			// otherwise create the mapping
			logMetacat.debug("There was a problem getting the guid from "
							+ "the given localId (docid and revision). The error message was: "
							+ dnfe.getMessage());
			logMetacat.debug("No guid in the identifier table. Adding it for " + localId);
			IdentifierManager.getInstance().createMapping(localId, localId);
			logMetacat.debug("Mapping created for " + localId);
			logMetacat.debug("accessionNumber: " + accNum);
			guid = IdentifierManager.getInstance().getGUID(accNum.getDocid(), rev);
		}
		Identifier identifier = new Identifier();
		identifier.setValue(guid);

		// set the id
		sysMeta.setIdentifier(identifier);

		// get the data or metadata object
		InputStream inputStream;
		try {
			inputStream = MetacatHandler.read(localId);
		} catch (ParseLSIDException ple) {
			logMetacat.debug("There was a problem parsing the LSID from "
					+ localId + ". The error message was: " + ple.getMessage());
			throw ple;
		} catch (PropertyNotFoundException pnfe) {
			logMetacat.debug("There was a problem finding a property. "
					+ "The error message was: " + pnfe.getMessage());
			throw pnfe;
		} catch (McdbException me) {
			logMetacat.debug("There was a Metacat problem. "
					+ "The error message was: " + me.getMessage());
			throw me;
		} catch (SQLException sqle) {
			logMetacat.debug("There was a SQL problem. "
					+ "The error message was: " + sqle.getMessage());
			throw sqle;
		} catch (ClassNotFoundException cnfe) {
			logMetacat.debug("There was a problem finding a class. "
					+ "The error message was: " + cnfe.getMessage());
			throw cnfe;
		} catch (IOException ioe) {
			logMetacat.debug("There was an I/O exception. "
					+ "The error message was: " + ioe.getMessage());
			throw ioe;
		} // end try()

		// get additional docinfo
		Hashtable<String, String> docInfo = ReplicationService.getDocumentInfoMap(localId);
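		// docInfo supplies the doctype, user_updated, user_owner, and
		// date_created/date_updated values consumed below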
		// set the default object format
		String doctype = docInfo.get("doctype");
		ObjectFormatIdentifier fmtid = null;

		// set the object format, fall back to defaults
		try {
			fmtid = ObjectFormatCache.getInstance().getFormat(doctype).getFormatId();
		} catch (NotFound nfe) {
			try {
				// format is not registered, use default
				if (doctype.trim().equals("BIN")) {
					fmtid = ObjectFormatCache.getInstance().getFormat(
							"application/octet-stream").getFormatId();
				} else {
					fmtid = ObjectFormatCache.getInstance().getFormat(
							"text/plain").getFormatId();
				}
			} catch (NotFound nf) {
				logMetacat.error("There was a problem getting the default format "
								+ "from the ObjectFormatCache: "
								+ nf.getMessage());
				throw nf;
			}
		}

		sysMeta.setFormatId(fmtid);
		logMetacat.debug("The ObjectFormat for " + localId + " is " + fmtid.getValue());

		// create the checksum
		inputStream = MetacatHandler.read(localId);
		String algorithm = "MD5";
		Checksum checksum = ChecksumUtil.checksum(inputStream, algorithm);
		sysMeta.setChecksum(checksum);
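
		// note: ChecksumUtil.checksum() consumed the stream above, so the
		// object is read again below before its size is measured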
		// set the size
		inputStream = MetacatHandler.read(localId);
		sysMeta.setSize(BigInteger.valueOf(sizeOfStream(inputStream)));

		// submitter
		Subject submitter = new Subject();
		submitter.setValue(docInfo.get("user_updated"));
		sysMeta.setSubmitter(submitter);

		// rights holder
		Subject owner = new Subject();
		owner.setValue(docInfo.get("user_owner"));
		sysMeta.setRightsHolder(owner);

		// dates
		String createdDateString = docInfo.get("date_created");
		String updatedDateString = docInfo.get("date_updated");
		Date createdDate = DateTimeMarshaller.deserializeDateToUTC(createdDateString);
		Date updatedDate = DateTimeMarshaller.deserializeDateToUTC(updatedDateString);
		sysMeta.setDateUploaded(createdDate);
		sysMeta.setDateSysMetadataModified(updatedDate);

		// set the revision history
		String docidWithoutRev = accNum.getDocid();
		Identifier obsoletedBy = null;
		Identifier obsoletes = null;
		Vector<Integer> revisions = DBUtil.getRevListFromRevisionTable(docidWithoutRev);
		// ensure this ordering since processing depends on it
		Collections.sort(revisions);
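		// e.g. with revisions [1, 2, 3] and rev = 2, the walk below sets
		// obsoletes to docid.1 and obsoletedBy to docid.3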
		for (int existingRev : revisions) {
			// use the docid+rev as the guid
			String existingPid = docidWithoutRev + "." + existingRev;
			if (existingRev < rev) {
				// it's the old docid, until it's not
				obsoletes = new Identifier();
				obsoletes.setValue(existingPid);
			}
			if (existingRev > rev) {
				// it's the newer docid
				obsoletedBy = new Identifier();
				obsoletedBy.setValue(existingPid);
				// only want the version just after it
				break;
			}
		}
		// set them on our object
		sysMeta.setObsoletedBy(obsoletedBy);
		sysMeta.setObsoletes(obsoletes);

		// update the system metadata for the object[s] we are revising
		if (obsoletedBy != null) {
			SystemMetadata obsoletedBySysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletedBy.getValue());
			if (obsoletedBySysMeta != null) {
				obsoletedBySysMeta.setObsoletes(identifier);
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletedBy, obsoletedBySysMeta);
			}
		}
		if (obsoletes != null) {
			SystemMetadata obsoletesSysMeta = IdentifierManager.getInstance().getSystemMetadata(obsoletes.getValue());
			if (obsoletesSysMeta != null) {
				obsoletesSysMeta.setObsoletedBy(identifier);
				HazelcastService.getInstance().getSystemMetadataMap().put(obsoletes, obsoletesSysMeta);
			}
		}

		// look up the access control policy we have in metacat
		AccessPolicy accessPolicy = IdentifierManager.getInstance().getAccessPolicy(guid);
		sysMeta.setAccessPolicy(accessPolicy);

		// authoritative node
		NodeReference nr = new NodeReference();
		nr.setValue(PropertyService.getProperty("dataone.memberNodeId"));
		sysMeta.setOriginMemberNode(nr);
		sysMeta.setAuthoritativeMemberNode(nr);

		// further parse EML documents to get data object format,
		// describes and describedBy information
		// (match the format id by value rather than by object identity)
		String fmtidValue = fmtid.getValue();
		if (fmtidValue.equals("eml://ecoinformatics.org/eml-2.0.0")
				|| fmtidValue.equals("eml://ecoinformatics.org/eml-2.0.1")
				|| fmtidValue.equals("eml://ecoinformatics.org/eml-2.1.0")
				|| fmtidValue.equals("eml://ecoinformatics.org/eml-2.1.1")) {

			try {
				inputStream = MetacatHandler.read(localId);
				DatabaseConnectionPoolInterface connectionPool = 
					MetacatDatabaseConnectionPoolFactory.getDatabaseConnectionPoolInterface();
				DataManager dataManager = 
					DataManager.getInstance(connectionPool, connectionPool.getDBAdapterName());
				DataPackage dataPackage = dataManager.parseMetadata(inputStream);

				// iterate through the data objects in the EML doc and add sysmeta
				logMetacat.debug("In createSystemMetadata() the number of data "
								+ "entities is: "
								+ dataPackage.getEntityNumber());

				// for generating the ORE map
				Map<Identifier, List<Identifier>> idMap = new HashMap<Identifier, List<Identifier>>();
				List<Identifier> dataIds = new ArrayList<Identifier>();
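				// idMap relates this metadata pid to the pids of the data it
				// documents; ResourceMapFactory builds the ORE aggregation from it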

				// iterate through data objects described by the EML
				if (dataPackage.getEntityList() != null) {
					for (int j = 0; j < dataPackage.getEntityList().length; j++) {

						String dataDocUrl = dataPackage.getEntityList()[j].getURL();
						String dataDocMimeType = dataPackage.getEntityList()[j].getDataFormat();
						// default to binary
						if (dataDocMimeType == null) {
							dataDocMimeType = ObjectFormatCache.getInstance()
									.getFormat("application/octet-stream")
									.getFormatId().getValue();
						}
						String dataDocLocalId = "";
						logMetacat.debug("Data local ID: " + dataDocLocalId);
						logMetacat.debug("Data URL     : " + dataDocUrl);
						logMetacat.debug("Data mime    : " + dataDocMimeType);

						// we only handle ecogrid urls right now
						String ecogridPrefix = "ecogrid://knb/";
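						// e.g. a (hypothetical) url ecogrid://knb/tao.1.1 yields the local id tao.1.1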
						if (dataDocUrl.trim().startsWith(ecogridPrefix)) {
							dataDocLocalId = dataDocUrl.substring(dataDocUrl
									.indexOf(ecogridPrefix)
									+ ecogridPrefix.length());

							// look up the guid for the data
							String dataDocid = DocumentUtil.getSmartDocId(dataDocLocalId);
							int dataRev = DocumentUtil.getRevisionFromAccessionNumber(dataDocLocalId);

							// check if data system metadata exists already
							SystemMetadata dataSysMeta = null;
							String dataGuidString = null;
							Identifier dataGuid = new Identifier();
							try {
								// look for the identifier
								dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);
								// set it
								dataGuid.setValue(dataGuidString);
								// look up the system metadata
								try {
									dataSysMeta = HazelcastService.getInstance().getSystemMetadataMap().get(dataGuid);
								} catch (Exception e) {
									// probably not in the system
									dataSysMeta = null;
								}
							} catch (McdbDocNotFoundException nf) {
								// we didn't find it
								dataSysMeta = null;
							}

							// we'll have to generate it
							if (dataSysMeta == null) {
								// System metadata for data doesn't exist yet, so create it
								logMetacat.debug("There was not an existing system metadata document for " + dataDocLocalId);
								try {
									logMetacat.debug("Creating a system metadata document for " + dataDocLocalId);
									dataSysMeta = createSystemMetadata(dataDocLocalId, includeORE);

									// now look it up again
									dataGuidString = IdentifierManager.getInstance().getGUID(dataDocid, dataRev);

									// set the guid
									dataGuid.setValue(dataGuidString);

									// set object format
									logMetacat.debug("Updating system metadata for "
													+ dataGuid.getValue() + " to "
													+ dataDocMimeType);
									try {
										ObjectFormatIdentifier fmt = 
											ObjectFormatCache.getInstance().getFormat(dataDocMimeType).getFormatId();
										dataSysMeta.setFormatId(fmt);
									} catch (NotFound nfe) {
										logMetacat.debug("Couldn't find format identifier for: "
														+ dataDocMimeType
														+ ". Setting it to application/octet-stream.");
										ObjectFormatIdentifier newFmtid = new ObjectFormatIdentifier();
										newFmtid.setValue("application/octet-stream");
										dataSysMeta.setFormatId(newFmtid);
									}

									// update the values
									HazelcastService.getInstance().getSystemMetadataMap().put(dataSysMeta.getIdentifier(), dataSysMeta);

								} catch (McdbDocNotFoundException mdnf) {
									mdnf.printStackTrace();
									throw mdnf;
								} catch (NumberFormatException nfe) {
									nfe.printStackTrace();
									throw nfe;
								} catch (AccessionNumberException ane) {
									ane.printStackTrace();
									throw ane;
								} catch (SQLException sqle) {
									sqle.printStackTrace();
									throw sqle;
								} catch (NoSuchAlgorithmException nsae) {
									nsae.printStackTrace();
									throw nsae;
								} catch (IOException ioe) {
									ioe.printStackTrace();
									throw ioe;
								} catch (PropertyNotFoundException pnfe) {
									pnfe.printStackTrace();
									throw pnfe;
								} catch (BaseException be) {
									be.printStackTrace();
									throw be;
								}
							}

							// part of the ORE package
							dataIds.add(dataGuid);

						} // end if (ecogrid url)

					} // end for (data entities)

				} // data entities not null

				// ORE map
				if (includeORE) {
					// can we generate them?
					if (!dataIds.isEmpty()) {
						// it doesn't exist in the system?
						if (!oreExistsFor(sysMeta.getIdentifier())) {

							// generate the ORE map for this datapackage
							Identifier resourceMapId = new Identifier();
							// want to be able to run this over and over again for now
							resourceMapId.setValue(resourceMapPrefix + sysMeta.getIdentifier().getValue());
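							// e.g. a (hypothetical) package pid doc.1.1 yields the ORE pid resourceMap_doc.1.1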
							idMap.put(sysMeta.getIdentifier(), dataIds);
							ResourceMap rm = ResourceMapFactory.getInstance().createResourceMap(resourceMapId, idMap);
							String resourceMapXML = ResourceMapFactory.getInstance().serializeResourceMap(rm);
							// copy most of the same system metadata as the packaging metadata
							SystemMetadata resourceMapSysMeta = new SystemMetadata();
							BeanUtils.copyProperties(resourceMapSysMeta, sysMeta);
							resourceMapSysMeta.setIdentifier(resourceMapId);
							Checksum oreChecksum = ChecksumUtil.checksum(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), "MD5");
							resourceMapSysMeta.setChecksum(oreChecksum);
							ObjectFormatIdentifier formatId = ObjectFormatCache.getInstance().getFormat("http://www.openarchives.org/ore/terms").getFormatId();
							resourceMapSysMeta.setFormatId(formatId);
							resourceMapSysMeta.setSize(BigInteger.valueOf(sizeOfStream(IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING))));

							// set the revision graph
							resourceMapSysMeta.setObsoletes(null);
							resourceMapSysMeta.setObsoletedBy(null);
							// look up the resource map that this one obsoletes
							if (sysMeta.getObsoletes() != null) {
								Identifier resourceMapObsoletes = new Identifier();
								resourceMapObsoletes.setValue(resourceMapPrefix + sysMeta.getObsoletes().getValue());
								resourceMapSysMeta.setObsoletes(resourceMapObsoletes);
								SystemMetadata resourceMapObsoletesSystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletes);
								if (resourceMapObsoletesSystemMetadata != null) {
									resourceMapObsoletesSystemMetadata.setObsoletedBy(resourceMapId);
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletes, resourceMapObsoletesSystemMetadata);
								}
							}
							// look up the resource map that this one is obsoletedBy
							if (sysMeta.getObsoletedBy() != null) {
								Identifier resourceMapObsoletedBy = new Identifier();
								resourceMapObsoletedBy.setValue(resourceMapPrefix + sysMeta.getObsoletedBy().getValue());
								resourceMapSysMeta.setObsoletedBy(resourceMapObsoletedBy);
								SystemMetadata resourceMapObsoletedBySystemMetadata = HazelcastService.getInstance().getSystemMetadataMap().get(resourceMapObsoletedBy);
								if (resourceMapObsoletedBySystemMetadata != null) {
									resourceMapObsoletedBySystemMetadata.setObsoletes(resourceMapId);
									HazelcastService.getInstance().getSystemMetadataMap().put(resourceMapObsoletedBy, resourceMapObsoletedBySystemMetadata);
								}
							}

							// save it locally
							Session session = new Session();
							session.setSubject(submitter);
							MockHttpServletRequest request = new MockHttpServletRequest(null, null, null);
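							// store the ORE map through the local DataONE member node
							// implementation; the mock request stands in for a servlet request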
							MNodeService.getInstance(request).create(
									session, 
									resourceMapId, 
									IOUtils.toInputStream(resourceMapXML, MetaCatServlet.DEFAULT_ENCODING), 
									resourceMapSysMeta);
						}
					}
				}

			} catch (ParserConfigurationException pce) {
				logMetacat.debug("There was a problem parsing the EML document. "
								+ "The error message was: " + pce.getMessage());
			} catch (SAXException saxe) {
				logMetacat.debug("There was a problem traversing the EML document. "
								+ "The error message was: " + saxe.getMessage());
			} catch (XPathExpressionException xpee) {
				logMetacat.debug("There was a problem searching the EML document. "
								+ "The error message was: " + xpee.getMessage());
			} catch (Exception e) {
				logMetacat.debug("There was a problem creating System Metadata. "
								+ "The error message was: " + e.getMessage());
				e.printStackTrace();
			} // end try()

		} // end if()

		return sysMeta;
	}

	/**
	 * Determines if we already have registered an ORE map for this package
	 * @param guid of the EML/packaging object
	 * @return true if there is an ORE map for the given package
	 */
	private static boolean oreExistsFor(Identifier guid) {
		// TODO: implement call to CN.search()
		return false;
	}

	/**
	 * Find the size (in bytes) of a stream. Note: this needs to be refactored
	 * out of MetacatHandler and into a utility when stream i/o in Metacat is
	 * evaluated.
	 * 
	 * @param is The InputStream of bytes
	 * 
	 * @return size The size in bytes of the input stream as a long
	 * 
	 * @throws IOException
	 */
	private static long sizeOfStream(InputStream is) throws IOException {
		long size = 0;
		byte[] b = new byte[1024];
		int numread = is.read(b, 0, 1024);
		while (numread != -1) {
			size += numread;
			numread = is.read(b, 0, 1024);
		}
		return size;
	}
}