/**
 *  '$RCSfile$'
 *  Copyright: 2013 Regents of the University of California and the
 *             National Center for Ecological Analysis and Synthesis
 *
 *   '$Author: tao $'
 *     '$Date: 2012-02-08 10:44:45 -0800 (Wed, 08 Feb 2012) $'
 * '$Revision: 6996 $'
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package edu.ucsb.nceas.metacat.admin.upgrade.solr;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.Vector;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.filefilter.DirectoryFileFilter;
import org.apache.commons.io.filefilter.OrFileFilter;
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.dataone.service.types.v1.Checksum;
import org.dataone.service.types.v1.util.ChecksumUtil;

import edu.ucsb.nceas.metacat.admin.AdminException;
import edu.ucsb.nceas.metacat.common.Settings;
import edu.ucsb.nceas.metacat.properties.PropertyService;
import edu.ucsb.nceas.metacat.service.ServiceService;
import edu.ucsb.nceas.metacat.shared.ServiceException;
import edu.ucsb.nceas.utilities.FileUtil;
import edu.ucsb.nceas.utilities.PropertyNotFoundException;
import edu.ucsb.nceas.utilities.StringUtil;

/**
 * This class overwrites the existing schema at /solr-home/conf/schema.xml.
 * Here is the algorithm:
 * 1. If the hash value of the existing schema.xml is one in the list of released schema hashes (this means the
 *    administrator didn't customize the schema), we will overwrite the schema.xml and remove the
 *    solr-last-processed-date file. The removal of the solr-last-processed-date file will force metacat-index
 *    to rebuild the whole Solr index the next time the administrator restarts Tomcat.
 * 2. If the hash value of the existing schema.xml isn't in the list, an exception will be thrown.
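 *
 * A minimal usage sketch (the exact invocation point inside the Metacat upgrade framework may differ):
 * <pre>
 *   SolrSchemaUpgrader upgrader = new SolrSchemaUpgrader();
 *   upgrader.upgrade();
 * </pre>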
 * @author tao
 *
 */
public class SolrSchemaUpgrader {

    private static final String SCHEMAFILERELATIVEPATH = "/conf/schema.xml";
    private static final String MD5 = "MD5";
    private Vector<String> releasedSchemaHashList = new Vector<String>();
    private String solrHomePath = null;
    private String metacatIndexSolrHome = null;
    private String currentHash = null;

    /**
     * Constructor
     * @throws PropertyNotFoundException
     * @throws ServiceException
     */
    public SolrSchemaUpgrader() throws PropertyNotFoundException, ServiceException {
        String hashString = null;
        try {
            hashString = PropertyService.getProperty("index.schema.previous.hash");
            currentHash = PropertyService.getProperty("index.schema.current.hash");
            //System.out.println("the current hash is ================== "+currentHash);
            solrHomePath = PropertyService.getProperty("solr.homeDir");
            String indexContext = PropertyService.getProperty("index.context");
            String metacatWebInf = ServiceService.getRealConfigDir();
            metacatIndexSolrHome = metacatWebInf + "/../../" + indexContext + "/WEB-INF/classes/solr-home";
        } catch (PropertyNotFoundException pnfe) {
            throw new PropertyNotFoundException("SolrSchemaUpgrader.constructor - could not get a property from the metacat.properties file - "
                    + pnfe.getMessage());
        }
        releasedSchemaHashList = StringUtil.toVector(hashString, ';');
        //System.out.println("the released hash is ================== "+releasedSchemaHashList);
    }

    /**
     * Upgrade the schema in the Solr home directory.
     * @throws AdminException
     * @throws IOException
     * @throws NoSuchAlgorithmException
     * @throws SolrSchemaModificationException
     */
    public void upgrade() throws AdminException, IOException, NoSuchAlgorithmException, SolrSchemaModificationException {
        boolean solrHomeExists = new File(solrHomePath).exists();
        if (!solrHomeExists) {
            //System.out.println("solr home doesn't exist ================== ");
            //create the solr home and copy the files to it since it doesn't exist
            try {
                // only attempt to copy if we have the source directory to copy from
                File sourceDir = new File(metacatIndexSolrHome);
                if (sourceDir.exists()) {
                    FileUtil.createDirectory(solrHomePath);
                    OrFileFilter fileFilter = new OrFileFilter();
                    fileFilter.addFileFilter(DirectoryFileFilter.DIRECTORY);
                    fileFilter.addFileFilter(new WildcardFileFilter("*"));
                    FileUtils.copyDirectory(new File(metacatIndexSolrHome), new File(solrHomePath), fileFilter);
                }
            } catch (Exception ue) {
                String errorString = "SolrSchemaUpgrader.upgrade - could not initialize directory: " + solrHomePath +
                        " : " + ue.getMessage();
                throw new AdminException(errorString);
            }
        } else {
            //System.out.println("solr home does exist ================== ");
            // check it
            if (!FileUtil.isDirectory(solrHomePath)) {
                String errorString = "SolrSchemaUpgrader.upgrade - SOLR home is not a directory: " + solrHomePath;
                throw new AdminException(errorString);
            } else {
                File metacatIndexSchemaFile = new File(metacatIndexSolrHome + SCHEMAFILERELATIVEPATH);
                File schemaFile = new File(solrHomePath + SCHEMAFILERELATIVEPATH);
                File processDateFile = new File(solrHomePath + "/" + Settings.LASTPROCESSEDDATEFILENAME);
                if (metacatIndexSchemaFile.exists()) {
                    if (!schemaFile.exists()) {
                        FileUtils.copyFile(metacatIndexSchemaFile, schemaFile);
                        if (processDateFile.exists()) {
                            processDateFile.delete();
                        }

                    } else {
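                        // the schema.xml already exists in the Solr home; compute its MD5 checksum and compare it
                        // against the released schema hashes to decide whether the administrator customized it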
                        FileInputStream schemaInputStream = new FileInputStream(schemaFile);
                        Checksum checkSum = null;
                        try {
                            checkSum = ChecksumUtil.checksum(schemaInputStream, MD5);
                        } finally {
                            IOUtils.closeQuietly(schemaInputStream);
                        }
                        String error1 = "SolrSchemaUpgrader.upgrade - couldn't determine whether the schema.xml in " + solrHomePath + "/conf" +
                                " was modified. If you did modify it, please manually merge the changes into the file " + metacatIndexSolrHome + SCHEMAFILERELATIVEPATH + " and copy it to " +
                                solrHomePath + "/conf; otherwise, just copy the file " + metacatIndexSolrHome + SCHEMAFILERELATIVEPATH + " to " + solrHomePath + "/conf.";
                        String error2 = "\nAfter copying the schema file, you have to issue a 'reindexall' action as an administrator.";
                        String error3 = "SolrSchemaUpgrader.upgrade - Metacat determined that the schema.xml in " + solrHomePath + "/conf" +
                                " was modified. Please manually merge the changes into the file " + metacatIndexSolrHome + SCHEMAFILERELATIVEPATH + " and copy it to overwrite " +
                                solrHomePath + "/conf/schema.xml.";
                        if (checkSum != null) {
                            String checksumValue = checkSum.getValue();
                            //System.out.println("the existing schema.xml in the solr home has the checksum ================== "+checksumValue);
                            if (checksumValue != null) {
                                if (checksumValue.equals(currentHash)) {
                                    //it already has the newest schema, do nothing
                                    //System.out.println("=====the existing schema.xml in the solr home has the same checksum as our current release, do nothing");
                                } else {
                                    boolean found = false;
                                    for (String value : releasedSchemaHashList) {
                                        if (value.equals(checksumValue)) {
                                            found = true;
                                            break;
                                        }
                                    }
                                    if (found) {
                                        //the administrator didn't change the schema, so we can silently overwrite it and remove the
                                        //solr-last-processed-date file. The removal of that file forces metacat-index to reindex
                                        //all data objects at the next Tomcat restart.
                                        //System.out.println("it is an old copy, overwrite it and delete the process date file ==========================");
                                        FileUtils.copyFile(metacatIndexSchemaFile, schemaFile);
                                        if (processDateFile.exists()) {
                                            processDateFile.delete();
                                        }

                                    } else {
                                        //users changed the schema, so we have to throw an exception asking the administrator to manually merge and overwrite.
                                        throw new SolrSchemaModificationException(error3 + error2);
                                    }
                                }
                            } else {
                                throw new SolrSchemaModificationException(error1 + error2);
                            }
                        } else {
                            throw new SolrSchemaModificationException(error1 + error2);
                        }
                    }
                }

            }
        }
    }

}
Add code to overwrite the schema.xml in solr-home/conf.
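A condensed sketch of that overwrite step as upgrade() performs it above (it reuses the class fields and imports; the local variable names here are only illustrative):

    // overwrite the deployed schema with the released copy shipped in the metacat-index webapp
    File releasedSchema = new File(metacatIndexSolrHome + SCHEMAFILERELATIVEPATH);
    File deployedSchema = new File(solrHomePath + SCHEMAFILERELATIVEPATH);
    File processDateFile = new File(solrHomePath + "/" + Settings.LASTPROCESSEDDATEFILENAME);

    FileUtils.copyFile(releasedSchema, deployedSchema);
    // removing the last-processed-date file forces metacat-index to rebuild the whole index at the next restart
    if (processDateFile.exists()) {
        processDateFile.delete();
    }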