/**
 *  '$RCSfile$'
 *  Copyright: 2013 Regents of the University of California and the
 *             National Center for Ecological Analysis and Synthesis
 *
 *   '$Author: tao $'
 *     '$Date: 2012-02-08 10:44:45 -0800 (Wed, 08 Feb 2012) $'
 * '$Revision: 6996 $'
 *
 *   This program is free software; you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation; either version 2 of the License, or
 *   (at your option) any later version.
 *
 *   This program is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *   GNU General Public License for more details.
 *
 *   You should have received a copy of the GNU General Public License
 *   along with this program; if not, write to the Free Software
 *   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
|
24
|
package edu.ucsb.nceas.metacat.admin.upgrade.solr;
|
25
|
|
26
|
import java.io.File;
|
27
|
import java.io.FileInputStream;
|
28
|
import java.io.IOException;
|
29
|
import java.security.NoSuchAlgorithmException;
|
30
|
import java.util.Vector;
|
31
|
|
32
|
import org.apache.commons.io.FileUtils;
|
33
|
import org.apache.commons.io.IOUtils;
|
34
|
import org.apache.commons.io.filefilter.DirectoryFileFilter;
|
35
|
import org.apache.commons.io.filefilter.OrFileFilter;
|
36
|
import org.apache.commons.io.filefilter.WildcardFileFilter;
|
37
|
import org.apache.log4j.Logger;
|
38
|
import org.dataone.service.types.v1.Checksum;
|
39
|
import org.dataone.service.types.v1.util.ChecksumUtil;
|
40
|
|
41
|
import edu.ucsb.nceas.metacat.admin.AdminException;
|
42
|
import edu.ucsb.nceas.metacat.common.Settings;
|
43
|
import edu.ucsb.nceas.metacat.properties.PropertyService;
|
44
|
import edu.ucsb.nceas.metacat.service.ServiceService;
|
45
|
import edu.ucsb.nceas.metacat.shared.ServiceException;
|
46
|
import edu.ucsb.nceas.utilities.FileUtil;
|
47
|
import edu.ucsb.nceas.utilities.PropertyNotFoundException;
|
48
|
import edu.ucsb.nceas.utilities.StringUtil;
|
49
|
|
50
|
|
51
|
|
52
|
/**
|
53
|
* This class will overwrite the existing schema under the /solr-home/conf/schema.xml
|
54
|
* Here is the algorithm:
|
55
|
* 1. If the hash value of the existing schema.xml is a one in the list of released schema (this means the administrator
|
56
|
* didn't customize the schema ), we will overwrite the schema.xml and remove the solr-last-proccessed-date file. The removal
|
57
|
* of the solr-last-proccessed-date file will force the metacat-index to rebuild all solr index when the administrator restart
|
58
|
* the tomcat at next time.
|
59
|
* 2. If the hash value of the existing schema.xml isn't in the list, an exception will be throw.
|
60
|
* @author tao
|
61
|
*
|
62
|
*/
|
63
|
public class SolrSchemaUpgrader {
|
64
|
|
65
|
private static Logger logMetacat = Logger.getLogger(SolrSchemaUpgrader.class);
|
66
|
private static final String SCHEMAFILERELATIVEPATH = "/conf/schema.xml";
|
67
|
private static final String MD5 = "MD5";
|
68
|
private Vector<String> releasedSchemaHashList = new Vector<String>();
|
69
|
private String solrHomePath = null;
|
70
|
private String metacatIndexSolrHome = null;
|
71
|
private String currentHash = null;
|
72
|
|
73
|
/**
|
74
|
* Constructor
|
75
|
* @throws PropertyNotFoundException
|
76
|
* @throws ServiceException
|
77
|
*/
|
78
|
public SolrSchemaUpgrader() throws PropertyNotFoundException, ServiceException
|
79
|
{
|
80
|
String hashString = null;
|
81
|
try {
|
82
|
hashString =
|
83
|
PropertyService.getProperty("index.schema.previous.hash");
|
84
|
currentHash = PropertyService.getProperty("index.schema.current.hash");
|
85
|
logMetacat.info("the current hash is ================== "+currentHash);
|
86
|
solrHomePath = PropertyService.getProperty("solr.homeDir");
|
87
|
String indexContext = PropertyService.getProperty("index.context");
|
88
|
String metacatWebInf = ServiceService.getRealConfigDir();
|
89
|
metacatIndexSolrHome = metacatWebInf + "/../../" + indexContext + "/WEB-INF/classes/solr-home";
|
90
|
} catch (PropertyNotFoundException pnfe) {
|
91
|
throw new PropertyNotFoundException("SolrSchemaUpdator.Constructor - could not get a metacat property in the metacat.properties file - "
|
92
|
+ pnfe.getMessage());
|
93
|
}
|
94
|
releasedSchemaHashList = StringUtil.toVector(hashString, ';');
|
95
|
logMetacat.info("the released hash is ================== "+releasedSchemaHashList);
|
96
|
}
|
97
|
|
98
|
/**
|
99
|
* Upgrade the schema in the solr home
|
100
|
* @throws NoSuchAlgorithmException
|
101
|
* @throws SolrSchemaModificationException
|
102
|
*/
|
103
|
public void upgrade() throws AdminException, IOException, NoSuchAlgorithmException, SolrSchemaModificationException {
|
104
|
boolean solrHomeExists = new File(solrHomePath).exists();
|
105
|
if (!solrHomeExists) {
|
106
|
//System.out.println("solr home doesn't exist ================== ");
|
107
|
//create the solr home and copy the files to it if it didn't exist
|
108
|
try {
|
109
|
// only attempt to copy if we have the source directory to copy from
|
110
|
File sourceDir = new File(metacatIndexSolrHome);
|
111
|
if (sourceDir.exists()) {
|
112
|
FileUtil.createDirectory(solrHomePath);
|
113
|
OrFileFilter fileFilter = new OrFileFilter();
|
114
|
fileFilter.addFileFilter(DirectoryFileFilter.DIRECTORY);
|
115
|
fileFilter.addFileFilter(new WildcardFileFilter("*"));
|
116
|
FileUtils.copyDirectory(new File(metacatIndexSolrHome), new File(solrHomePath), fileFilter );
|
117
|
}
|
118
|
} catch (Exception ue) {
|
119
|
String errorString = "SolrSchemaUpdator.update - Could not initialize directory: " + solrHomePath +
|
120
|
" : " + ue.getMessage();
|
121
|
throw new AdminException(errorString);
|
122
|
|
123
|
}
|
124
|
} else {
|
125
|
//System.out.println("solr home does exist ================== ");
|
126
|
// check it
|
127
|
if (!FileUtil.isDirectory(solrHomePath)) {
|
128
|
String errorString = "SolrSchemaUpdator.update - SOLR home is not a directory: " + solrHomePath;
|
129
|
throw new AdminException(errorString);
|
130
|
} else {
|
131
|
File metacatIndexSchemaFile = new File(metacatIndexSolrHome+SCHEMAFILERELATIVEPATH);
|
132
|
File schemaFile = new File(solrHomePath+SCHEMAFILERELATIVEPATH);
|
133
|
File processDateFile = new File(solrHomePath+"/"+Settings.LASTPROCESSEDDATEFILENAME);
|
134
|
if(metacatIndexSchemaFile.exists()) {
|
135
|
if(!schemaFile.exists()) {
|
136
|
FileUtils.copyFile(metacatIndexSchemaFile, schemaFile);
|
137
|
if(processDateFile.exists()) {
|
138
|
processDateFile.delete();
|
139
|
}
|
140
|
|
141
|
} else {
|
142
|
FileInputStream schemaInputStream = new FileInputStream(schemaFile);
|
143
|
Checksum checkSum = null;
|
144
|
try {
|
145
|
checkSum = ChecksumUtil.checksum(schemaInputStream, MD5);
|
146
|
if(schemaInputStream != null) {
|
147
|
IOUtils.closeQuietly(schemaInputStream);
|
148
|
}
|
149
|
} finally {
|
150
|
if(schemaInputStream != null) {
|
151
|
IOUtils.closeQuietly(schemaInputStream);
|
152
|
}
|
153
|
}
|
154
|
String error1 = "SolrSchemaUpdator.update - couldn't determine if the schema.xml in the "+solrHomePath+"/conf"+
|
155
|
" was modified or not. If you did modify it, please manually merge the change to the file "+metacatIndexSolrHome+SCHEMAFILERELATIVEPATH +" and copy it to "+
|
156
|
solrHomePath+"/conf; otherwise, just copy the file "+metacatIndexSolrHome+SCHEMAFILERELATIVEPATH +" to "+solrHomePath+"/conf.";
|
157
|
//String error2 ="After configuring Metacat and restarting Tomcat, you have to issue a 'reindexall' action as an administrator to rebuild the Solr index.";
|
158
|
String error3 = "Metacat determined the schema.xml in the "+solrHomePath+"/conf"+
|
159
|
" was customized. You have to manually fix the issue - merge the change to the file "+metacatIndexSolrHome+SCHEMAFILERELATIVEPATH +" and copy it to overwrite the schema.xml in the "+
|
160
|
solrHomePath+"/conf. You may click the OK button When you finish the merging. ";
|
161
|
if(checkSum != null) {
|
162
|
String checksumValue = checkSum.getValue();
|
163
|
logMetacat.info("the existing schema.xml in the solr home has the checksum ================== "+checksumValue);
|
164
|
if(checksumValue != null) {
|
165
|
if(checksumValue.equals(currentHash)) {
|
166
|
//it has the newest schema, do nothing
|
167
|
logMetacat.info("=====the existing schema.xml in the solr home has the same checksum as our current release, do nothing") ;
|
168
|
} else {
|
169
|
boolean found = false;
|
170
|
for(String value : releasedSchemaHashList) {
|
171
|
if (value.equals(checksumValue)) {
|
172
|
found = true;
|
173
|
break;
|
174
|
}
|
175
|
}
|
176
|
if(found) {
|
177
|
//there is no change in the schema. We can silently overwrite and remove the solr-last-process-date file.
|
178
|
//The removal of the solr-last-process-date file will force metacat-index to build all data objects in the
|
179
|
//next tomcat restart
|
180
|
//System.out.println("it is an old copy, overwrite it an delete the process data file ==========================") ;
|
181
|
FileUtils.copyFile(metacatIndexSchemaFile, schemaFile);
|
182
|
if(processDateFile.exists()) {
|
183
|
processDateFile.delete();
|
184
|
}
|
185
|
|
186
|
} else {
|
187
|
//users changed the schema, we have to throw an exception to ask the administrator to manually merge and overwrite.
|
188
|
throw new SolrSchemaModificationException(error3);
|
189
|
}
|
190
|
}
|
191
|
} else {
|
192
|
throw new SolrSchemaModificationException(error1);
|
193
|
}
|
194
|
} else {
|
195
|
throw new SolrSchemaModificationException(error1);
|
196
|
}
|
197
|
}
|
198
|
}
|
199
|
|
200
|
}
|
201
|
}
|
202
|
}
|
203
|
|
204
|
|
205
|
}
|