Revision 7830
Added by ben leinfelder, July 2013
lib/hazelcast.xml

             <write-delay-seconds>0</write-delay-seconds>
         </map-store>
     </map>
+    <map name="hzIndexEventMap">
+        <backup-count>3</backup-count>
+        <eviction-policy>LRU</eviction-policy>
+        <max-size policy="cluster_wide_map_size">1000</max-size>
+        <eviction-percentage>25</eviction-percentage>
+        <merge-policy>hz.ADD_NEW_ENTRY</merge-policy>
+        <map-store enabled="true">
+            <class-name>edu.ucsb.nceas.metacat.index.IndexEventEntryListener</class-name>
+            <write-delay-seconds>0</write-delay-seconds>
+        </map-store>
+    </map>
 </hazelcast>
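Because write-delay-seconds is 0, the map-store configured above writes through synchronously: an entry put on hzIndexEventMap is handed to IndexEventEntryListener.store() (and from there to the index_event table) before the put returns. The following is a minimal sketch of how a cluster member might queue a failed indexing event through this map; the helper class, the way the HazelcastInstance is obtained, and the choice of Event.CREATE are illustrative assumptions, not part of this revision.

    import java.util.Date;

    import com.hazelcast.core.HazelcastInstance;
    import com.hazelcast.core.IMap;

    import org.dataone.service.types.v1.Event;
    import org.dataone.service.types.v1.Identifier;

    import edu.ucsb.nceas.metacat.common.index.event.IndexEvent;

    public class IndexEventQueueExample {

        // hz is assumed to be the member started from lib/hazelcast.xml
        public static void recordFailedIndexing(HazelcastInstance hz, Identifier pid, String message) {
            // the map name must match the <map name="hzIndexEventMap"> entry above
            IMap<Identifier, IndexEvent> eventMap = hz.getMap("hzIndexEventMap");

            IndexEvent event = new IndexEvent();
            event.setIdentifier(pid);
            event.setAction(Event.CREATE);   // the action whose indexing failed (assumed here)
            event.setDate(new Date());
            event.setDescription(message);

            // write-through: IndexEventEntryListener.store() persists the row synchronously
            eventMap.put(pid, event);
        }
    }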
lib/metacat.properties

 ############### Application Values ############

 ## one of the few places where we use ANT tokens
-application.metacatVersion=2.0.7
+application.metacatVersion=2.1.0
 application.metacatReleaseInfo=-1

 application.deployDir=
...
 database.upgradeVersion.2.0.5=upgrade-db-to-2.0.5
 database.upgradeVersion.2.0.6=upgrade-db-to-2.0.6
 database.upgradeVersion.2.0.7=upgrade-db-to-2.0.7
+database.upgradeVersion.2.1.0=upgrade-db-to-2.1.0
 ## for running java-based utilities
 database.upgradeUtility.1.5.0=edu.ucsb.nceas.metacat.admin.upgrade.Upgrade1_5_0
 database.upgradeUtility.2.0.0=edu.ucsb.nceas.metacat.admin.upgrade.Upgrade2_0_0
src/xmltables-oracle.sql

 CREATE INDEX identifier_rev on identifier(rev);
 CREATE INDEX identifier_docid_rev on identifier(docid, rev);

+
 /*
+ * the index_event table for solr-based indexing
+ */
+CREATE TABLE index_event (
+    guid VARCHAR2(2000),
+    event_action VARCHAR2(250),
+    description VARCHAR2(2000),
+    event_date DATE
+);
+
+/*
  * accesssubtree -- table to store access subtree info
  */
 CREATE TABLE xml_accesssubtree (
src/xmltables-postgres.sql

     CONSTRAINT access_log_pk PRIMARY KEY (entryid)
 );

+/*
+ * the index_event table for solr-based indexing
+ */
+CREATE TABLE index_event (
+    guid text,
+    event_action VARCHAR(250),
+    description text,
+    event_date TIMESTAMP
+);

 /*
  * Table for indexing the paths specified the administrator in metacat.properties
src/loaddtdschema-postgres.sql

 INSERT INTO xml_catalog (entry_type, public_id, system_id)
   VALUES ('Schema', 'http://ecoinformatics.org/registryentry-1.0.0', '/schema/RegistryService/RegistryEntryType.xsd');
 INSERT INTO db_version (version, status, date_created)
-  VALUES ('2.0.7',1,CURRENT_DATE);
+  VALUES ('2.1.0',1,CURRENT_DATE);
src/loaddtdschema-oracle.sql

 INSERT INTO xml_catalog (entry_type, public_id, system_id)
   VALUES ('Schema', '/schema/RegistryService/RegistryEntryType.xsd', '/schema/RegistryService/RegistryEntryType.xsd');
 INSERT INTO db_version (version, status, date_created)
-  VALUES ('2.0.7',1,CURRENT_DATE);
+  VALUES ('2.1.0',1,CURRENT_DATE);
src/edu/ucsb/nceas/metacat/index/IndexEventEntryListener.java

+/**
+ *  '$RCSfile$'
+ *  Purpose: Implements a service for managing a Hazelcast cluster member
+ *  Copyright: 2013 Regents of the University of California and the
+ *             National Center for Ecological Analysis and Synthesis
+ *  Authors: Leinfelder
+ *
+ *  '$Author$'
+ *  '$Date$'
+ *  '$Revision$'
+ *
+ *  This program is free software; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation; either version 2 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program; if not, write to the Free Software
+ *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
 package edu.ucsb.nceas.metacat.index;

+import java.sql.SQLException;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;

+import org.apache.log4j.Logger;
 import org.dataone.service.types.v1.Identifier;

 import com.hazelcast.core.EntryEvent;
...

 public class IndexEventEntryListener implements MapStore<Identifier, IndexEvent>, MapLoader<Identifier, IndexEvent>, EntryListener<Identifier, IndexEvent> {

+    private Logger logMetacat = Logger.getLogger(IndexEventEntryListener.class);
+
     /**
      * The map store/loader methods
      */

     @Override
-    public IndexEvent load(Identifier arg0) {
-        // TODO Auto-generated method stub
+    public IndexEvent load(Identifier identifier) {
+        try {
+            return IndexEventDAO.getInstance().get(identifier);
+        } catch (SQLException e) {
+            logMetacat.error(e.getMessage(), e);
+        }
         return null;
     }

     @Override
-    public Map<Identifier, IndexEvent> loadAll(Collection<Identifier> arg0) {
-        // TODO Auto-generated method stub
-        return null;
+    public Map<Identifier, IndexEvent> loadAll(Collection<Identifier> identifiers) {
+        Map<Identifier, IndexEvent> eventMap = new TreeMap<Identifier, IndexEvent>();
+        for (Identifier identifier: identifiers) {
+            IndexEvent event = null;
+            try {
+                event = IndexEventDAO.getInstance().get(identifier);
+                eventMap.put(identifier, event);
+            } catch (SQLException e) {
+                logMetacat.error(e.getMessage(), e);
+            }
+        }
+        return eventMap;
     }

     @Override
     public Set<Identifier> loadAllKeys() {
-        // TODO Auto-generated method stub
+        try {
+            return IndexEventDAO.getInstance().getAllIdentifiers();
+        } catch (SQLException e) {
+            logMetacat.error(e.getMessage(), e);
+        }
         return null;
     }

     @Override
-    public void delete(Identifier arg0) {
-        // TODO Auto-generated method stub
-
+    public void delete(Identifier identifier) {
+        try {
+            IndexEventDAO.getInstance().remove(identifier);
+        } catch (SQLException e) {
+            logMetacat.error(e.getMessage(), e);
+        }
     }

     @Override
-    public void deleteAll(Collection<Identifier> arg0) {
-        // TODO Auto-generated method stub
-
+    public void deleteAll(Collection<Identifier> identifiers) {
+        for (Identifier identifier: identifiers) {
+            try {
+                IndexEventDAO.getInstance().remove(identifier);
+            } catch (SQLException e) {
+                logMetacat.error(e.getMessage(), e);
+            }
+        }
     }

     @Override
-    public void store(Identifier arg0, IndexEvent arg1) {
-        // TODO Auto-generated method stub
-
+    public void store(Identifier identifier, IndexEvent event) {
+        try {
+            IndexEventDAO.getInstance().add(event);
+        } catch (SQLException e) {
+            logMetacat.error(e.getMessage(), e);
+        }
     }

     @Override
-    public void storeAll(Map<Identifier, IndexEvent> arg0) {
-        // TODO Auto-generated method stub
-
+    public void storeAll(Map<Identifier, IndexEvent> indexEventMap) {
+        for (IndexEvent event: indexEventMap.values()) {
+            try {
+                IndexEventDAO.getInstance().add(event);
+            } catch (SQLException e) {
+                logMetacat.error(e.getMessage(), e);
+            }
+        }
     }

     /**
...
      */

     @Override
-    public void entryAdded(EntryEvent<Identifier, IndexEvent> arg0) {
-        // TODO Auto-generated method stub
-
+    public void entryAdded(EntryEvent<Identifier, IndexEvent> event) {
+        try {
+            IndexEventDAO.getInstance().add(event.getValue());
+        } catch (SQLException e) {
+            logMetacat.error(e.getMessage(), e);
+        }
     }

     @Override
     public void entryEvicted(EntryEvent<Identifier, IndexEvent> arg0) {
-        // TODO Auto-generated method stub
+        // do nothing

     }

     @Override
-    public void entryRemoved(EntryEvent<Identifier, IndexEvent> arg0) {
-        // TODO Auto-generated method stub
-
+    public void entryRemoved(EntryEvent<Identifier, IndexEvent> event) {
+        try {
+            IndexEventDAO.getInstance().remove(event.getKey());
+        } catch (SQLException e) {
+            logMetacat.error(e.getMessage(), e);
+        }
     }

     @Override
-    public void entryUpdated(EntryEvent<Identifier, IndexEvent> arg0) {
-        // TODO Auto-generated method stub
-
+    public void entryUpdated(EntryEvent<Identifier, IndexEvent> event) {
+        try {
+            IndexEventDAO.getInstance().add(event.getValue());
+        } catch (SQLException e) {
+            logMetacat.error(e.getMessage(), e);
+        }
     }

 }
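The MapStore/MapLoader half of this class is wired up declaratively by the <map-store> element in lib/hazelcast.xml, but the EntryListener callbacks (entryAdded, entryUpdated, entryRemoved) only fire once an instance is registered on the map. This revision does not show where that registration happens, so the following is only a sketch of the standard Hazelcast registration call under that assumption; the surrounding class is hypothetical.

    package edu.ucsb.nceas.metacat.index;

    import com.hazelcast.core.HazelcastInstance;
    import com.hazelcast.core.IMap;

    import org.dataone.service.types.v1.Identifier;

    import edu.ucsb.nceas.metacat.common.index.event.IndexEvent;

    public class IndexEventListenerRegistration {

        public static void register(HazelcastInstance hz) {
            IMap<Identifier, IndexEvent> eventMap = hz.getMap("hzIndexEventMap");

            // includeValue=true so entryAdded()/entryUpdated() receive the IndexEvent
            // and can persist it through IndexEventDAO
            eventMap.addEntryListener(new IndexEventEntryListener(), true);
        }
    }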
src/edu/ucsb/nceas/metacat/index/IndexEventDAO.java (new file)

/**
 *  '$RCSfile$'
 *  Purpose: Implements a service for managing a Hazelcast cluster member
 *  Copyright: 2013 Regents of the University of California and the
 *             National Center for Ecological Analysis and Synthesis
 *  Authors: Leinfelder
 *
 *  '$Author$'
 *  '$Date$'
 *  '$Revision$'
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package edu.ucsb.nceas.metacat.index;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Set;
import java.util.TreeSet;

import org.dataone.service.types.v1.Event;
import org.dataone.service.types.v1.Identifier;

import edu.ucsb.nceas.metacat.common.index.event.IndexEvent;
import edu.ucsb.nceas.metacat.database.DBConnection;
import edu.ucsb.nceas.metacat.database.DBConnectionPool;

public class IndexEventDAO {

    private static IndexEventDAO instance = null;

    private IndexEventDAO() {}

    public static IndexEventDAO getInstance() {
        if (instance == null) {
            instance = new IndexEventDAO();
        }
        return instance;
    }

    public void add(IndexEvent event) throws SQLException {
        String sql = "insert into index_event(guid, event_action, description, event_date) values (?, ?, ?, ?)";
        DBConnection dbConn = null;
        int serialNumber = -1;
        try {
            // Get a database connection from the pool
            dbConn = DBConnectionPool.getDBConnection("IndexEventDAO.add");
            serialNumber = dbConn.getCheckOutSerialNumber();

            // Execute the statement
            PreparedStatement stmt = dbConn.prepareStatement(sql);
            stmt.setString(1, event.getIdentifier().getValue());
            stmt.setString(2, event.getAction().xmlValue());
            stmt.setString(3, event.getDescription());
            stmt.setTimestamp(4, new Timestamp(event.getDate().getTime()));

            stmt.executeUpdate();
            stmt.close();
        } finally {
            // Return database connection to the pool
            DBConnectionPool.returnDBConnection(dbConn, serialNumber);
        }
    }

    public void remove(Identifier identifier) throws SQLException {
        String sql = "delete from index_event where guid = ?";
        DBConnection dbConn = null;
        int serialNumber = -1;
        try {
            // Get a database connection from the pool
            dbConn = DBConnectionPool.getDBConnection("IndexEventDAO.remove");
            serialNumber = dbConn.getCheckOutSerialNumber();

            // Execute the statement
            PreparedStatement stmt = dbConn.prepareStatement(sql);
            stmt.setString(1, identifier.getValue());
            stmt.execute();
            stmt.close();
        } finally {
            // Return database connection to the pool
            DBConnectionPool.returnDBConnection(dbConn, serialNumber);
        }
    }

    public IndexEvent get(Identifier identifier) throws SQLException {
        IndexEvent event = null;
        String sql = "select guid, event_action, description, event_date from index_event where guid = ?";
        DBConnection dbConn = null;
        int serialNumber = -1;
        try {
            // Get a database connection from the pool
            dbConn = DBConnectionPool.getDBConnection("IndexEventDAO.get");
            serialNumber = dbConn.getCheckOutSerialNumber();

            // Execute the statement
            PreparedStatement stmt = dbConn.prepareStatement(sql);
            stmt.setString(1, identifier.getValue());
            ResultSet rs = stmt.executeQuery();
            while (rs.next()) {
                //String guid = rs.getString(1);
                String action = rs.getString(2);
                String description = rs.getString(3);
                Timestamp timestamp = rs.getTimestamp(4);

                event = new IndexEvent();
                event.setIdentifier(identifier);
                event.setAction(Event.valueOf(action));
                event.setDate(timestamp);
                event.setDescription(description);
            }
            stmt.close();
        } finally {
            // Return database connection to the pool
            DBConnectionPool.returnDBConnection(dbConn, serialNumber);
        }
        return event;
    }

    public Set<Identifier> getAllIdentifiers() throws SQLException {

        Set<Identifier> identifiers = new TreeSet<Identifier>();
        String sql = "select guid from index_event";
        DBConnection dbConn = null;
        int serialNumber = -1;
        try {
            // Get a database connection from the pool
            dbConn = DBConnectionPool.getDBConnection("IndexEventDAO.getAllIdentifiers");
            serialNumber = dbConn.getCheckOutSerialNumber();

            // Execute the statement
            PreparedStatement stmt = dbConn.prepareStatement(sql);
            ResultSet rs = stmt.executeQuery();
            while (rs.next()) {
                String guid = rs.getString(1);
                Identifier identifier = new Identifier();
                identifier.setValue(guid);
                identifiers.add(identifier);
            }
            stmt.close();
        } finally {
            // Return database connection to the pool
            DBConnectionPool.returnDBConnection(dbConn, serialNumber);
        }
        return identifiers;
    }

}
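Since IndexEventDAO is the single point of access to the index_event table, a reindexing job can walk the stored events directly to find documents that still need another indexing pass. A hedged sketch of that read-back path follows; the ReindexWorker class and its reindex(...) call are hypothetical placeholders, while getAllIdentifiers(), get(), and remove() are the DAO methods shown above.

    package edu.ucsb.nceas.metacat.index;

    import java.sql.SQLException;
    import java.util.Set;

    import org.dataone.service.types.v1.Identifier;

    import edu.ucsb.nceas.metacat.common.index.event.IndexEvent;

    public class ReindexWorker {

        public void reprocessFailedEvents() throws SQLException {
            // every guid that still has a row in index_event needs another indexing attempt
            Set<Identifier> pending = IndexEventDAO.getInstance().getAllIdentifiers();
            for (Identifier pid : pending) {
                IndexEvent event = IndexEventDAO.getInstance().get(pid);
                boolean succeeded = reindex(event);   // hypothetical call into the SOLR indexer
                if (succeeded) {
                    // clear the event once the document has been indexed successfully
                    IndexEventDAO.getInstance().remove(pid);
                }
            }
        }

        private boolean reindex(IndexEvent event) {
            // placeholder for the actual SOLR indexing call (not part of this revision)
            return false;
        }
    }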
docs/user/metacat/source/conf.py

 # The short X.Y version.
 version = '2.0'
 # The full version, including alpha/beta/rc tags.
-release = '2.0.7'
+release = '2.1.0'

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
docs/user/metacat/source/install.rst

 Downloading the Metacat Installer is the simplest way to get started with the
 application. To download the installer:

-1. Browse to the `Metacat Download Page`_. In the Metacat section, select the link to the "GZIP file" (the link should look like: metacat-bin-X.X.X.tar.gz, where X.X.X is the latest version of Metacat e.g., 2.0.7)
+1. Browse to the `Metacat Download Page`_. In the Metacat section, select the link to the "GZIP file" (the link should look like: metacat-bin-X.X.X.tar.gz, where X.X.X is the latest version of Metacat e.g., 2.1.0)
 2. Save the file locally.
 3. Extract the Metacat package files by typing:

...
 ............................
 To get the Metacat source distribution:

-1. Browse to the `Metacat Download Page`_. In the Metacat section, select the link to the Metacat Source code (it will look something like this: metacat-src-X.X.X.tar.gz, where X.X.X is the latest version of Metacat, e.g., 2.0.7).
+1. Browse to the `Metacat Download Page`_. In the Metacat section, select the link to the Metacat Source code (it will look something like this: metacat-src-X.X.X.tar.gz, where X.X.X is the latest version of Metacat, e.g., 2.1.0).
 2. Save the file locally.
 3. Extract the Metacat package files by typing (replace X.X.X with the current version number):
docs/user/metacat/source/index.rst

 Metacat Administrator's Guide
 =============================

-.. sidebar:: Version: 2.0.7
+.. sidebar:: Version: 2.1.0

 .. image:: themes/readable/static/metacat-logo.png
    :height: 130pt
...

 License: GPL

-Release Date: May, 2013
+Release Date: July, 2013

 Metacat is a repository for data and metadata (documentation about data) that helps
 scientists find, understand and effectively use data sets they manage or that
...
 - Download Metacat

   - Binary Distribution (A war file installation)
-    - GZIP File: metacat-bin-2.0.7.tar.gz_
-    - ZIP File: metacat-bin-2.0.7.zip_
+    - GZIP File: metacat-bin-2.1.0.tar.gz_
+    - ZIP File: metacat-bin-2.1.0.zip_
   - Source Distribution (Full source, requiring build)
-    - GZIP File: metacat-src-2.0.7.tar.gz_
-    - ZIP File: metacat-src-2.0.7.zip_
+    - GZIP File: metacat-src-2.1.0.tar.gz_
+    - ZIP File: metacat-src-2.1.0.zip_
 - `Older versions`_

 - For Developers: Metacat `API documentation`_
...

 .. _API documentation: ./api/index.html

-.. _metacat-bin-2.0.7.tar.gz: http://knb.ecoinformatics.org/software/dist/metacat-bin-2.0.7.tar.gz
+.. _metacat-bin-2.1.0.tar.gz: http://knb.ecoinformatics.org/software/dist/metacat-bin-2.1.0.tar.gz

-.. _metacat-bin-2.0.7.zip: http://knb.ecoinformatics.org/software/dist/metacat-bin-2.0.7.zip
+.. _metacat-bin-2.1.0.zip: http://knb.ecoinformatics.org/software/dist/metacat-bin-2.1.0.zip

-.. _metacat-src-2.0.7.tar.gz: http://knb.ecoinformatics.org/software/dist/metacat-src-2.0.7.tar.gz
+.. _metacat-src-2.1.0.tar.gz: http://knb.ecoinformatics.org/software/dist/metacat-src-2.1.0.tar.gz

-.. _metacat-src-2.0.7.zip: http://knb.ecoinformatics.org/software/dist/metacat-src-2.0.7.zip
+.. _metacat-src-2.1.0.zip: http://knb.ecoinformatics.org/software/dist/metacat-src-2.1.0.zip

 .. _Older versions: http://knb.ecoinformatics.org/software/dist/
build.properties

 #Version of this build. This needs to be a dotted numeric version. For
 #instance 1.9.1 is okay. 1.9.1_rc1 is not.
-metacat.version=2.0.7
+metacat.version=2.1.0

 #This is for packaging purposes. leave it blank for final production release.
 metacat.releaseCandidate=
README

 Metacat: XML Metadata and Data Management System
 ------------------------------------------------

-Version: 2.0.7 Release
+Version: 2.1.0 Release

 Send feedback and bugs to: metacat-dev@ecoinformatics.org
                            http://bugzilla.ecoinformatics.org
...
 See the file "docs/install.html" for detailed instructions
 for your OS.

+Release Notes for 2.1.0:
+------------------------
+This is a major release of Metacat that includes a SOLR-based search feature
+* Optional SOLR search index
+* Client certificate delegation (using a service provider like CILogon)
+
 Release Notes for 2.0.7:
 ------------------------
 This is a patch release for Metacat replication
Commit message: Upgrade to Metacat 2.1.0 on the trunk. This includes a new index_event table for storing indexing events that need to be reprocessed. https://projects.ecoinformatics.org/ecoinfo/issues/5944