Index: jeeves/src/jeeves/utils/Xml.java =================================================================== --- jeeves/src/jeeves/utils/Xml.java (revision 5897) +++ jeeves/src/jeeves/utils/Xml.java (working copy) @@ -67,6 +67,7 @@ import org.xml.sax.SAXParseException; import org.xml.sax.helpers.DefaultHandler; import net.sf.saxon.FeatureKeys; +import net.sf.saxon.Configuration; //============================================================================= @@ -220,6 +221,7 @@ transFact.setAttribute(FeatureKeys.VERSION_WARNING,false); transFact.setAttribute(FeatureKeys.LINE_NUMBERING,true); transFact.setAttribute(FeatureKeys.PRE_EVALUATE_DOC_FUNCTION,true); + transFact.setAttribute(FeatureKeys.RECOVERY_POLICY,Configuration.RECOVER_SILENTLY); // Add the following to get timing info on xslt transformations //transFact.setAttribute(FeatureKeys.TIMING,true); } catch (IllegalArgumentException e) { @@ -267,6 +269,7 @@ try { factory.setAttribute(FeatureKeys.VERSION_WARNING,false); factory.setAttribute(FeatureKeys.LINE_NUMBERING,true); + factory.setAttribute(FeatureKeys.RECOVERY_POLICY,Configuration.RECOVER_SILENTLY); } catch (IllegalArgumentException e) { System.out.println("WARNING: transformerfactory doesnt like saxon attributes!"); //e.printStackTrace(); Index: jeeves/src/jeeves/utils/Log.java =================================================================== --- jeeves/src/jeeves/utils/Log.java (revision 5897) +++ jeeves/src/jeeves/utils/Log.java (working copy) @@ -43,6 +43,7 @@ public static final String SCHEDULER = JEEVES+ ".scheduler"; public static final String RESOURCES = JEEVES+ ".resources"; public static final String DBMS = JEEVES+ ".dbms"; + public static final String DBMSPOOL = JEEVES+ ".dbmspool"; public class Dbms { Index: jeeves/src/jeeves/server/resources/ResourceManager.java =================================================================== --- jeeves/src/jeeves/server/resources/ResourceManager.java (revision 5897) +++ jeeves/src/jeeves/server/resources/ResourceManager.java (working copy) @@ -75,6 +75,16 @@ } //-------------------------------------------------------------------------- + /** Gets properties from the named resource provider + */ + + public Map getProps(String name) throws Exception + { + ResourceProvider provider = provManager.getProvider(name); + return provider.getProps(); + } + + //-------------------------------------------------------------------------- /** Closes all resources doing a commit */ Index: jeeves/src/jeeves/server/resources/ResourceProvider.java =================================================================== --- jeeves/src/jeeves/server/resources/ResourceProvider.java (revision 5897) +++ jeeves/src/jeeves/server/resources/ResourceProvider.java (working copy) @@ -23,6 +23,8 @@ package jeeves.server.resources; +import java.util.Map; + import org.jdom.Element; //============================================================================= @@ -40,6 +42,9 @@ /** Initializes the provider */ public void init(String name, Element config) throws Exception; + /** gets props from the provider */ + public Map getProps(); + /** Stops the provider */ public void end(); Index: jeeves/src/jeeves/resources/dbms/DbmsPool.java =================================================================== --- jeeves/src/jeeves/resources/dbms/DbmsPool.java (revision 5897) +++ jeeves/src/jeeves/resources/dbms/DbmsPool.java (working copy) @@ -23,12 +23,19 @@ package jeeves.resources.dbms; +import java.util.Collections; import java.util.Enumeration; import java.util.HashSet; 
import java.util.Hashtable; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + import jeeves.constants.Jeeves; import jeeves.server.resources.ResourceListener; import jeeves.server.resources.ResourceProvider; +import jeeves.utils.Log; + import org.jdom.Element; //============================================================================= @@ -43,11 +50,12 @@ private String name; private String user; private String passwd; + private String url; private int maxTries; private int maxWait; private long reconnectTime; - private HashSet hsListeners = new HashSet(); + private Set hsListeners = Collections.synchronizedSet(new HashSet()); //-------------------------------------------------------------------------- //--- @@ -64,8 +72,8 @@ user = config.getChildText(Jeeves.Res.Pool.USER); passwd = config.getChildText(Jeeves.Res.Pool.PASSWORD); + url = config.getChildText(Jeeves.Res.Pool.URL); String driver = config.getChildText(Jeeves.Res.Pool.DRIVER); - String url = config.getChildText(Jeeves.Res.Pool.URL); String size = config.getChildText(Jeeves.Res.Pool.POOL_SIZE); String maxt = config.getChildText(Jeeves.Res.Pool.MAX_TRIES); String maxw = config.getChildText(Jeeves.Res.Pool.MAX_WAIT); @@ -86,6 +94,16 @@ //-------------------------------------------------------------------------- + public Map getProps() { + Map result = new HashMap(); + result.put("name", name); + result.put("user", user); + result.put("password", passwd); + result.put("url", url); + return result; + } + + //-------------------------------------------------------------------------- public void end() { for(Enumeration e=htDbms.keys(); e.hasMoreElements();) @@ -111,10 +129,13 @@ for (int nTries = 0; nTries < maxTries; nTries++) { // try to get a free dbms + int i = 0; for(Enumeration e=htDbms.keys(); e.hasMoreElements();) { + Dbms dbms = (Dbms) e.nextElement(); Boolean locked = (Boolean) htDbms.get(dbms); + debug("DBMS Resource "+i+" is "+locked); if (!locked.booleanValue()) { @@ -131,7 +152,7 @@ if (currTime - lastConnTime >= reconnectTime) { - System.out.println("reconnecting: " + (currTime - lastConnTime) + ">=" + reconnectTime + " ms since last connection"); // FIXME + error("reconnecting: " + (currTime - lastConnTime) + ">=" + reconnectTime + " ms since last connection"); // FIXME // FIXME: what happens if it disconnects but is unable to connect again? 
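The DbmsPool changes replace the plain HashSet of ResourceListeners with Collections.synchronizedSet(...) and wrap each iteration in a synchronized block. A synchronized set only protects individual add/remove calls; iterating it while another thread mutates it can still fail with ConcurrentModificationException, which is why the patched close() and abort() methods lock the set before looping. A minimal standalone sketch of the idiom, with an invented class name and listener type (not part of the patch):

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class ListenerSetSketch {
    // synchronizedSet makes individual add/remove calls thread-safe
    private final Set<Runnable> listeners =
        Collections.synchronizedSet(new HashSet<Runnable>());

    public void addListener(Runnable l)    { listeners.add(l); }
    public void removeListener(Runnable l) { listeners.remove(l); }

    public void fireAll() {
        // iteration is NOT covered by synchronizedSet: lock the set explicitly,
        // exactly as the patched close()/abort() methods do
        synchronized (listeners) {
            for (Runnable l : listeners) {
                l.run();
            }
        }
    }
}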
dbms.disconnect(); @@ -139,6 +160,7 @@ } } + debug("SUCCESS: DBMS Resource "+i+" is not locked"); htDbms.put(dbms, new Boolean(true)); return dbms; } @@ -148,6 +170,7 @@ lastMessage = ex.getMessage(); } } + i++; } // wait MAX_WAIT msecs (but not after last try) if (nTries < maxTries - 1) @@ -166,12 +189,15 @@ public void close(Object resource) throws Exception { checkResource(resource); + debug("Committing and closing "+resource); ((Dbms) resource).commit(); htDbms.put(resource, new Boolean(false)); - for(ResourceListener l : hsListeners) - l.close(resource); + synchronized(hsListeners) { + for(ResourceListener l : hsListeners) + l.close(resource); + } } //-------------------------------------------------------------------------- @@ -181,6 +207,7 @@ public void abort(Object resource) throws Exception { checkResource(resource); + debug("Aborting "+resource); try { @@ -191,22 +218,24 @@ htDbms.put(resource, new Boolean(false)); } - for(ResourceListener l : hsListeners) - l.abort(resource); + synchronized(hsListeners) { + for(ResourceListener l : hsListeners) + l.abort(resource); + } } //-------------------------------------------------------------------------- public void addListener(ResourceListener l) { - hsListeners.add(l); + hsListeners.add(l); } //-------------------------------------------------------------------------- public void removeListener(ResourceListener l) { - hsListeners.remove(l); + hsListeners.remove(l); } //-------------------------------------------------------------------------- @@ -225,6 +254,11 @@ if (!locked.booleanValue()) throw new IllegalArgumentException("Resource not locked :"+resource); } + + private void debug (String message) { Log.debug (Log.DBMSPOOL, message); } + static void info (String message) { Log.info (Log.DBMSPOOL, message); } + private void warning(String message) { Log.warning(Log.DBMSPOOL, message); } + static void error (String message) { Log.error (Log.DBMSPOOL, message); } } //============================================================================= Index: jeeves/src/jeeves/resources/dbms/Dbms.java =================================================================== --- jeeves/src/jeeves/resources/dbms/Dbms.java (revision 5897) +++ jeeves/src/jeeves/resources/dbms/Dbms.java (working copy) @@ -83,10 +83,12 @@ public void connect(String username, String password) throws SQLException { - conn = DriverManager.getConnection(url, username, password); + String actualUrl = url; + if (actualUrl.contains("postgis")) actualUrl = actualUrl.replaceFirst("postgis","postgresql"); + conn = DriverManager.getConnection(actualUrl, username, password); conn.setAutoCommit(false); - if (url.toUpperCase().contains("ORACLE")) { + if (actualUrl.toUpperCase().contains("ORACLE")) { Log.debug(Log.RESOURCES,"ORACLE is using TRANSACTION_READ_COMMITTED"); conn.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); } else { Index: src/org/fao/geonet/kernel/MetadataIndexerProcessor.java =================================================================== --- src/org/fao/geonet/kernel/MetadataIndexerProcessor.java (revision 0) +++ src/org/fao/geonet/kernel/MetadataIndexerProcessor.java (revision 0) @@ -0,0 +1,44 @@ +//============================================================================== +//=== This program is free software; you can redistribute it and/or modify +//=== it under the terms of the GNU General Public License as published by +//=== the Free Software Foundation; either version 2 of the License, or (at +//=== your option) any later version. 
+//=== +//=== This program is distributed in the hope that it will be useful, but +//=== WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== General Public License for more details. +//=== +//=== You should have received a copy of the GNU General Public License +//=== along with this program; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA +//=== +//=== Contact: Jeroen Ticheler email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.kernel; + +/** + * Template Method abstract class to handle faster indexing of many metadata + * documents. + * Each subclass must implement process method and define a custom constructor + * with all parameters required to exec the process method + */ +public abstract class MetadataIndexerProcessor { + protected DataManager dm; + + public MetadataIndexerProcessor(DataManager dm) { + this.dm = dm; + } + + public abstract void process() throws Exception; + + public void processWithFastIndexing() throws Exception { + dm.startIndexGroup(); + try { + process(); + } finally { + dm.endIndexGroup(); + } + } +} Index: src/org/fao/geonet/kernel/DataManager.java =================================================================== --- src/org/fao/geonet/kernel/DataManager.java (revision 5897) +++ src/org/fao/geonet/kernel/DataManager.java (working copy) @@ -102,59 +102,61 @@ * @param force Force reindexing all from scratch * **/ - public void init(Dbms dbms, Boolean force) throws Exception { + public synchronized void init(Dbms dbms, Boolean force) throws Exception { // get all metadata from DB Element result = dbms.select("SELECT id, changeDate FROM Metadata ORDER BY id ASC"); - List list = result.getChildren(); - Log.debug(Geonet.DATA_MANAGER, "DB CONTENT:\n'"+ Xml.getString(result) +"'"); //DEBUG + Log.debug(Geonet.DATA_MANAGER, "DB CONTENT:\n'"+ Xml.getString(result) +"'"); - // get all metadata from index - Hashtable docs = searchMan.getDocs(); + // get lastchangedate of all metadata in index + HashMap docs = searchMan.getDocsChangeDate(); - Log.debug(Geonet.DATA_MANAGER, "INDEX CONTENT:"); //DEBUG + Log.debug(Geonet.DATA_MANAGER, "INDEX CONTENT:"); // index all metadata in DBMS if needed - for(int i = 0; i < list.size(); i++) - { - // get metadata - Element record = (Element) list.get(i); - String id = record.getChildText("id"); - - Log.debug(Geonet.DATA_MANAGER, "- record ("+ id +")"); //DEBUG - - Hashtable idxRec = (Hashtable)docs.get(id); - - // if metadata is not indexed index it - if (idxRec == null) - indexMetadata(dbms, id); - - // else, if indexed version is not the latest index it - else + startIndexGroup(); + try { + for(int i = 0; i < result.getContentSize(); i++) { - docs.remove(id); + // get metadata + Element record = (Element) result.getContent(i); + String id = record.getChildText("id"); + + Log.debug(Geonet.DATA_MANAGER, "- record ("+ id +")"); + + String idxLastChange = (String)docs.get(id); - String lastChange = record.getChildText("changedate"); - String idxLastChange = (String)idxRec.get("_changeDate"); - - Log.debug(Geonet.DATA_MANAGER, "- lastChange: " + lastChange); //DEBUG - Log.debug(Geonet.DATA_MANAGER, "- idxLastChange: " + idxLastChange); //DEBUG - - if (force || !idxLastChange.equalsIgnoreCase(lastChange)) // date in index contains 't', date in DBMS contains 'T' - indexMetadata(dbms, id); + // if metadata is 
not indexed index it + if (idxLastChange == null) + indexMetadataGroup(dbms, id); + + // else, if indexed version is not the latest index it + else + { + docs.remove(id); + + String lastChange = record.getChildText("changedate"); + + Log.debug(Geonet.DATA_MANAGER, "- lastChange: " + lastChange); + Log.debug(Geonet.DATA_MANAGER, "- idxLastChange: " + idxLastChange); + + if (force || !idxLastChange.equalsIgnoreCase(lastChange)) // date in index contains 't', date in DBMS contains 'T' + indexMetadataGroup(dbms, id); + } } + } finally { + endIndexGroup(); } - Log.debug(Geonet.DATA_MANAGER, "INDEX SURPLUS:"); //DEBUG + Log.debug(Geonet.DATA_MANAGER, "INDEX HAS RECORDS THAT ARE NOT IN DB:"); // remove from index metadata not in DBMS - for (Enumeration i = docs.keys(); i.hasMoreElements(); ) + for ( String id : docs.keySet() ) { - String id = (String)i.nextElement(); searchMan.delete("_id", id); - Log.debug(Geonet.DATA_MANAGER, "- record (" + id + ")"); //DEBUG + Log.debug(Geonet.DATA_MANAGER, "- removed record (" + id + ") from index"); } } @@ -165,16 +167,42 @@ Log.debug(Geonet.DATA_MANAGER, "Indexing record (" + id + ")"); //DEBUG - indexMetadata(dbms, id, searchMan); + indexMetadata(dbms, id, searchMan, false); } //-------------------------------------------------------------------------- + public void startIndexGroup() throws Exception { + searchMan.startIndexGroup(); + } + + //-------------------------------------------------------------------------- + + public void endIndexGroup() throws Exception { + searchMan.endIndexGroup(); + } + + //-------------------------------------------------------------------------- + + public void indexMetadataGroup(Dbms dbms, String id) throws Exception { + Log.debug(Geonet.DATA_MANAGER, "Indexing record (" + id + ")"); //DEBUG + indexMetadata(dbms, id, searchMan, true); + } + + //-------------------------------------------------------------------------- + public static void indexMetadata(Dbms dbms, String id, SearchManager sm) throws Exception { + indexMetadata(dbms, id, sm, false); + } + + //-------------------------------------------------------------------------- + + public static void indexMetadata(Dbms dbms, String id, SearchManager sm, boolean indexGroup) throws Exception + { try { - indexMetadataI(dbms, id, sm); + indexMetadataI(dbms, id, sm, indexGroup); } catch (Exception e) { @@ -185,7 +213,7 @@ //-------------------------------------------------------------------------- - private static void indexMetadataI(Dbms dbms, String id, SearchManager sm) throws Exception + private static void indexMetadataI(Dbms dbms, String id, SearchManager sm, boolean indexGroup) throws Exception { Vector moreFields = new Vector(); @@ -255,11 +283,20 @@ moreFields.add(makeField("_cat", categoryName, true, true, false)); } - sm.index(schema, md, id, moreFields, isTemplate, title); + if (indexGroup) { + sm.indexGroup(schema, md, id, moreFields, isTemplate, title); + } else { + sm.index(schema, md, id, moreFields, isTemplate, title); + } } //-------------------------------------------------------------------------- + public void rescheduleOptimizer(int minutes) throws Exception { + searchMan.rescheduleOptimizer(minutes); + } + + //-------------------------------------------------------------------------- private static Element makeField(String name, String value, boolean store, boolean index, boolean token) { @@ -630,34 +667,46 @@ public void setTemplate(Dbms dbms, int id, String isTemplate, String title) throws Exception { + setTemplateExt(dbms, id, isTemplate, title); + 
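The new MetadataIndexerProcessor class added above is a small Template Method wrapper: a subclass supplies process(), and processWithFastIndexing() brackets it with startIndexGroup()/endIndexGroup() so the Lucene writer is opened once for the whole batch instead of once per record. A hypothetical subclass might look like the sketch below; BatchReindexer, the id list and the Dbms handle are invented for illustration and are not part of the patch.

import java.util.List;

import jeeves.resources.dbms.Dbms;

import org.fao.geonet.kernel.DataManager;
import org.fao.geonet.kernel.MetadataIndexerProcessor;

public class BatchReindexer extends MetadataIndexerProcessor {
    private final Dbms dbms;
    private final List<String> ids;

    public BatchReindexer(DataManager dm, Dbms dbms, List<String> ids) {
        super(dm);
        this.dbms = dbms;
        this.ids = ids;
    }

    public void process() throws Exception {
        for (String id : ids) {
            // group variant added by this patch: reuses the already-open writer
            dm.indexMetadataGroup(dbms, id);
        }
    }
}

// typical call: new BatchReindexer(dataMan, dbms, ids).processWithFastIndexing();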
indexMetadata(dbms, Integer.toString(id)); + } + + //-------------------------------------------------------------------------- + + public void setTemplateExt(Dbms dbms, int id, String isTemplate, String title) throws Exception + { if (title == null) dbms.execute("UPDATE Metadata SET isTemplate=? WHERE id=?", isTemplate, id); else dbms.execute("UPDATE Metadata SET isTemplate=?, title=? WHERE id=?", isTemplate, title, id); - indexMetadata(dbms, Integer.toString(id)); } //-------------------------------------------------------------------------- public void setHarvested(Dbms dbms, int id, String harvestUuid) throws Exception { + setHarvestedExt(dbms, id, harvestUuid); + indexMetadata(dbms, Integer.toString(id)); + } + + //-------------------------------------------------------------------------- + + public void setHarvestedExt(Dbms dbms, int id, String harvestUuid) throws Exception + { String value = (harvestUuid != null) ? "y" : "n"; if (harvestUuid == null) { dbms.execute("UPDATE Metadata SET isHarvested=? WHERE id=?", value,id ); } else { dbms.execute("UPDATE Metadata SET isHarvested=?, harvestUuid=? WHERE id=?", value, harvestUuid, id); } - - indexMetadata(dbms, Integer.toString(id)); } //-------------------------------------------------------------------------- - public void setHarvested(Dbms dbms, int id, String harvestUuid, String harvestUri) throws Exception + public void setHarvestedExt(Dbms dbms, int id, String harvestUuid, String harvestUri) throws Exception { String value = (harvestUuid != null) ? "y" : "n"; String query = "UPDATE Metadata SET isHarvested=?, harvestUuid=?, harvestUri=? WHERE id=?"; dbms.execute(query, value, harvestUuid, harvestUri, id); - indexMetadata(dbms, Integer.toString(id)); } //--------------------------------------------------------------------------- @@ -2294,12 +2343,15 @@ } // add subtemplates + /* -- don't add as we need to investigate indexing for the fields + -- in the metadata table used here List subList = getSubtemplates(dbms, schema); if (subList != null) { Element subs = new Element(Edit.Info.Elem.SUBTEMPLATES); subs.addContent(subList); info.addContent(subs); } + */ return info; } Index: src/org/fao/geonet/kernel/csw/services/getrecords/CatalogSearcher.java =================================================================== --- src/org/fao/geonet/kernel/csw/services/getrecords/CatalogSearcher.java (revision 5897) +++ src/org/fao/geonet/kernel/csw/services/getrecords/CatalogSearcher.java (working copy) @@ -28,6 +28,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -42,6 +43,8 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldSelector; +import org.apache.lucene.document.FieldSelectorResult; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; @@ -49,11 +52,13 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.CachingWrapperFilter; import org.apache.lucene.search.Filter; -import org.apache.lucene.search.Hits; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopFieldCollector; + import 
org.fao.geonet.GeonetContext; import org.fao.geonet.constants.Geonet; import org.fao.geonet.csw.common.Csw; @@ -67,6 +72,7 @@ import org.fao.geonet.kernel.search.LuceneUtils; import org.fao.geonet.kernel.search.SearchManager; import org.fao.geonet.kernel.search.spatial.Pair; + import org.jdom.Element; //============================================================================= @@ -74,7 +80,11 @@ public class CatalogSearcher { private Element _summaryConfig; private Map _isTokenizedField = new HashMap(); - private Hits _hits; + private FieldSelector _selector; + private Query _query; + private CachingWrapperFilter _filter; + private Sort _sort; + private String _lang; public CatalogSearcher(File summaryConfig) { try { @@ -85,6 +95,13 @@ throw new RuntimeException( "Error reading summary configuration file", e); } + + _selector = new FieldSelector() { + public final FieldSelectorResult accept(String name) { + if (name.equals("_id")) return FieldSelectorResult.LOAD; + else return FieldSelectorResult.NO_LOAD; + } + }; } // --------------------------------------------------------------------------- @@ -100,7 +117,7 @@ */ public Pair> search(ServiceContext context, Element filterExpr, String filterVersion, Set typeNames, - Sort sort, ResultType resultType, int maxRecords) + Sort sort, ResultType resultType, int startPosition, int maxRecords) throws CatalogException { Element luceneExpr = filterToLucene(context, filterExpr); @@ -116,7 +133,7 @@ Pair> results = performSearch(context, luceneExpr, filterExpr, filterVersion, sort, resultType, - maxRecords); + startPosition, maxRecords); return results; } catch (Exception e) { Log.error(Geonet.CSW_SEARCH, "Error while searching metadata "); @@ -128,6 +145,7 @@ } } + // --------------------------------------------------------------------------- /** *

* Gets results in current searcher @@ -138,26 +156,37 @@ * @throws IOException * @throws CorruptIndexException */ - public Element getAll() throws CorruptIndexException, IOException { - Element response = new Element("response"); + public List getAllUuids(ServiceContext context, int maxHits) throws Exception { - if (_hits.length() == 0) { - response.setAttribute("from", 0 + ""); - response.setAttribute("to", 0 + ""); - return response; - } + FieldSelector uuidselector = new FieldSelector() { + public final FieldSelectorResult accept(String name) { + if (name.equals("_uuid")) return FieldSelectorResult.LOAD; + else return FieldSelectorResult.NO_LOAD; + } + }; - response.setAttribute("from", 1 + ""); - response.setAttribute("to", _hits.length() + ""); - for (int i = 0; i < _hits.length(); i++) { - Document doc = _hits.doc(i); - String id = doc.get("_id"); + GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); + SearchManager sm = gc.getSearchmanager(); + IndexReader reader = sm.getIndexReader(); - // FAST mode - Element md = LuceneSearcher.getMetadataFromIndex(doc, id); - response.addContent(md); + Pair searchResults = LuceneSearcher.doSearchAndMakeSummary( maxHits, Integer.MAX_VALUE, _lang, ResultType.RESULTS.toString(), _summaryConfig, reader, _query, _filter, _sort, false); + TopFieldCollector tfc = searchResults.one(); + Element summary = searchResults.two(); + + int numHits = Integer.parseInt(summary.getAttributeValue("count")); + + Log.debug(Geonet.CSW_SEARCH, "Records matched : " + numHits); + + // --- retrieve results + + List response = new ArrayList(); + TopDocs tdocs = tfc.topDocs(0, maxHits); + + for ( ScoreDoc sdoc : tdocs.scoreDocs ) { + Document doc = reader.document(sdoc.doc, uuidselector); + String uuid = doc.get("_uuid"); + if (uuid != null) response.add(uuid); } - return response; } @@ -310,10 +339,10 @@ private Pair> performSearch( ServiceContext context, Element luceneExpr, Element filterExpr, - String filterVersion, Sort sort, ResultType resultType, - int maxRecords) throws Exception { - GeonetContext gc = (GeonetContext) context - .getHandlerContext(Geonet.CONTEXT_NAME); + String filterVersion, Sort sort, ResultType resultType, + int startPosition, int maxRecords) throws Exception { + + GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); SearchManager sm = gc.getSearchmanager(); if (luceneExpr != null) @@ -324,6 +353,10 @@ .makeQuery(luceneExpr); Query groups = getGroupsQuery(context); + if (sort == null) { + sort = LuceneSearcher.makeSort(Collections.singletonList(Pair.read(Geonet.SearchResult.SortBy.RELEVANCE, true))); + } + // --- put query on groups in AND with lucene query BooleanQuery query = new BooleanQuery(); @@ -346,57 +379,51 @@ Log.debug(Geonet.CSW_SEARCH, "Lucene query: " + query.toString()); IndexReader reader = sm.getIndexReader(); - IndexSearcher searcher = new IndexSearcher(reader); - try { - // TODO Handle NPE creating spatial filter (due to constraint - // language version). - Filter spatialfilter = sm.getSpatial().filter(query, filterExpr, - filterVersion); + // TODO Handle NPE creating spatial filter (due to constraint + // language version). 
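getAllUuids() above loads only the stored "_uuid" field through an anonymous FieldSelector, and the same pattern (loading only "_id") is installed in the constructor; this keeps paging over large result sets cheap because no other stored fields are materialised. A standalone sketch of the idiom, using the Lucene 2.x FieldSelector API that this patch relies on (class name and helper method are invented for illustration):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.index.IndexReader;

public class UuidOnlyReader {

    // load "_uuid" and skip every other stored field
    private static final FieldSelector UUID_ONLY = new FieldSelector() {
        public FieldSelectorResult accept(String fieldName) {
            return "_uuid".equals(fieldName) ? FieldSelectorResult.LOAD
                                             : FieldSelectorResult.NO_LOAD;
        }
    };

    public static String uuidOf(IndexReader reader, int docId) throws Exception {
        Document doc = reader.document(docId, UUID_ONLY);
        return doc.get("_uuid");
    }
}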
+ Filter spatialfilter = sm.getSpatial().filter(query, filterExpr, filterVersion); + CachingWrapperFilter cFilter = null; + if (spatialfilter != null) cFilter = new CachingWrapperFilter(spatialfilter); + boolean buildSummary = resultType == ResultType.RESULTS_WITH_SUMMARY; - if (spatialfilter == null) { - _hits = searcher.search(query, sort); - } else { - _hits = searcher.search(query, new CachingWrapperFilter( - spatialfilter), sort); - } + // record globals for reuse + _query = query; + _filter = cFilter; + _sort = sort; + _lang = context.getLanguage(); - Log.debug(Geonet.CSW_SEARCH, "Records matched : " + _hits.length()); + Pair searchResults = LuceneSearcher.doSearchAndMakeSummary( startPosition + maxRecords, Integer.MAX_VALUE, context.getLanguage(), resultType.toString(), _summaryConfig, reader, query, cFilter, sort, buildSummary); + TopFieldCollector tfc = searchResults.one(); + Element summary = searchResults.two(); - // --- retrieve results + int numHits = Integer.parseInt(summary.getAttributeValue("count")); - List results = new ArrayList(); + Log.debug(Geonet.CSW_SEARCH, "Records matched : " + numHits); - for (int i = 0; i < _hits.length(); i++) { - Document doc = _hits.doc(i); - String id = doc.get("_id"); + // --- retrieve results - ResultItem ri = new ResultItem(id); - results.add(ri); + List results = new ArrayList(); + TopDocs hits = tfc.topDocs(startPosition, maxRecords); - for (String field : FieldMapper.getMappedFields()) { - String value = doc.get(field); + for (int i = 0; i < hits.scoreDocs.length; i++) { + Document doc = reader.document(hits.scoreDocs[i].doc, _selector); + String id = doc.get("_id"); - if (value != null) - ri.add(field, value); - } - } + ResultItem ri = new ResultItem(id); + results.add(ri); - Element summary = null; + for (String field : FieldMapper.getMappedFields()) { + String value = doc.get(field); - // Only compute GeoNetwork summary on results_with_summary option - if (resultType == ResultType.RESULTS_WITH_SUMMARY) { - summary = LuceneSearcher.makeSummary(_hits, _hits.length(), - _summaryConfig, resultType.toString(), - Integer.MAX_VALUE, context.getLanguage()); - summary.setName("Summary"); - summary.setNamespace(Csw.NAMESPACE_GEONET); + if (value != null) + ri.add(field, value); } + } - return Pair.read(summary, results); - } finally { - searcher.close(); - } + summary.setName("Summary"); + summary.setNamespace(Csw.NAMESPACE_GEONET); + return Pair.read(summary, results); } // --------------------------------------------------------------------------- Index: src/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java =================================================================== --- src/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java (revision 5897) +++ src/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java (working copy) @@ -24,6 +24,7 @@ package org.fao.geonet.kernel.csw.services.getrecords; import java.io.File; +import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -48,7 +49,11 @@ import org.fao.geonet.csw.common.exceptions.NoApplicableCodeEx; import org.fao.geonet.kernel.SelectionManager; import org.fao.geonet.kernel.search.spatial.Pair; + +import org.jdom.Content; import org.jdom.Element; +import org.jdom.Namespace; +import org.jdom.JDOMException; //============================================================================= @@ -77,9 +82,8 @@ { Element results = new Element("SearchResults", Csw.NAMESPACE_CSW); - Pair> 
summaryAndSearchResults = _searcher.search(context, filterExpr, filterVersion, typeNames, sort, resultType, maxRecords); + Pair> summaryAndSearchResults = _searcher.search(context, filterExpr, filterVersion, typeNames, sort, resultType, startPos, maxRecords); - UserSession session = context.getUserSession(); session.setProperty(Geonet.Session.SEARCH_RESULT, _searcher); @@ -97,37 +101,31 @@ } session.setProperty(Geonet.Session.SEARCH_REQUEST_ID, requestId); - int counter = 0; - List resultsList = summaryAndSearchResults.two(); - if (resultType == ResultType.RESULTS || resultType == ResultType.RESULTS_WITH_SUMMARY) - for (int i=startPos; (i 0) { + for (int i=0; (i counter) - { + if (numMatches > counter) { results.setAttribute("nextRecord", counter + startPos + ""); - } - else - { + } else { results.setAttribute("nextRecord","0"); - } + } - Element summary = summaryAndSearchResults.one(); return Pair.read(summary, results); } @@ -184,10 +182,12 @@ id + " schema.", schema); } } + + // We provide specific mappings for profiles to ISO or OGC in separate + // directories - Simon Pigot - Fix + //if (schema.contains("iso19139")) + // schema = "iso19139"; - if (schema.contains("iso19139")) - schema = "iso19139"; - String schemaDir = context.getAppPath() +"xml"+ FS +"csw"+ FS +"schemas"+ FS +schema+ FS; String styleSheet = schemaDir + prefix +"-"+ setName +".xsl"; @@ -200,19 +200,25 @@ //--- if the client has specified some ElementNames, then we remove the unwanted children - if (elemNames != null) - removeElements(res, elemNames); - + if (elemNames != null) { + if (outSchema != OutputSchema.OGC_CORE) { + try { + selectElementsUsingXPath(res, elemNames); + } catch (JDOMException e) { + throw new InvalidParameterValueEx("elementName has invalid XPath",""); + } + } else { + removeElements(res, elemNames); + } + } return res; - } - catch (Exception e) - { + } catch (Exception e) { context.error("Error while getting metadata with id : "+ id); context.error(" (C) StackTrace:\n"+ Util.getStackTrace(e)); throw new NoApplicableCodeEx("Raised exception while getting metadata :"+ e); - } - } + } + } //--------------------------------------------------------------------------- @@ -228,6 +234,39 @@ i.remove(); } } + + //--------------------------------------------------------------------------- + + private static void selectElementsUsingXPath(Element md, Set elemNames) throws Exception + { + + //-- build a union XPath from the elementNames specified + + StringBuffer xpath = new StringBuffer(); + for (String s : elemNames) { + xpath.append(s+"|"); + } + xpath.deleteCharAt(xpath.length()-1); + + //-- get all namespaces from the metadata + + List theNss = new ArrayList(); + Namespace ns = md.getNamespace(); + if (ns != null) { + theNss.add(ns); + theNss.addAll(md.getAdditionalNamespaces()); + } + + //-- get the elements from the XPath union + + Element theMd = (Element)md.clone(); + List content = Xml.selectNodes(theMd, xpath.toString(), theNss); + + md.removeContent(); + for (Content c : content) { + md.addContent(c.detach()); + } + } } //============================================================================= Index: src/org/fao/geonet/kernel/csw/services/Transaction.java =================================================================== --- src/org/fao/geonet/kernel/csw/services/Transaction.java (revision 5897) +++ src/org/fao/geonet/kernel/csw/services/Transaction.java (working copy) @@ -107,30 +107,42 @@ if( transactionType.equals("insert") || transactionType.equals("update") || 
transactionType.equals("delete") ) { List mdList = transRequest.getChildren(); + GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); + DataManager dataMan = gc.getDataManager(); // insert to database, and get the number of inserted successful if( transactionType.equals("insert" ) ) { Iterator inIt = mdList.iterator(); - while (inIt.hasNext()){ - Element metadata = (Element) inIt.next().clone(); - boolean insertSuccess = insertTransaction( metadata, strFileIds, context); - if (insertSuccess) - totalInserted++; + dataMan.startIndexGroup(); + try { + while (inIt.hasNext()){ + Element metadata = (Element) inIt.next().clone(); + boolean insertSuccess = insertTransaction( metadata, strFileIds, context); + if (insertSuccess) + totalInserted++; + } + } finally { + dataMan.endIndexGroup(); } } // Update else if( transactionType.equals("update" ) ) { Iterator inIt = mdList.iterator(); - while (inIt.hasNext()){ - Element metadata = (Element) inIt.next().clone(); - if (!metadata.getName().equals("Constraint") && !metadata.getNamespace().equals(Csw.NAMESPACE_CSW)) - { - boolean updateSuccess = updateTransaction( transRequest, metadata, context ); - if (updateSuccess) - totalUpdated++; + dataMan.startIndexGroup(); + try { + while (inIt.hasNext()){ + Element metadata = (Element) inIt.next().clone(); + if (!metadata.getName().equals("Constraint") && !metadata.getNamespace().equals(Csw.NAMESPACE_CSW)) + { + boolean updateSuccess = updateTransaction( transRequest, metadata, context ); + if (updateSuccess) + totalUpdated++; + } } + } finally { + dataMan.endIndexGroup(); } } // Delete @@ -236,14 +248,14 @@ if( id == null ) return false; - dataMan.indexMetadata(dbms, id); - - fileIds.add( uuid ); - // --- Insert category if requested if (!"_none_".equals(category)) dataMan.setCategory(dbms, id, category); + dataMan.indexMetadataGroup(dbms, id); + + fileIds.add( uuid ); + dbms.commit(); return true; @@ -294,6 +306,8 @@ throw new NoApplicableCodeEx("User not allowed to update this metadata("+id+")."); dataMan.updateMetadataExt(dbms, id, xml, changeDate); + dataMan.indexMetadataGroup(dbms, id); + bReturn = true; break; } Index: src/org/fao/geonet/kernel/SelectionManager.java =================================================================== --- src/org/fao/geonet/kernel/SelectionManager.java (revision 5897) +++ src/org/fao/geonet/kernel/SelectionManager.java (working copy) @@ -15,6 +15,7 @@ import org.fao.geonet.constants.Params; import org.fao.geonet.kernel.csw.services.getrecords.CatalogSearcher; import org.fao.geonet.kernel.search.LuceneSearcher; +import org.fao.geonet.kernel.setting.SettingInfo; import org.jdom.Element; /** @@ -27,6 +28,10 @@ public static final String SELECTION_METADATA = "metadata"; + // used to limit select all if get system setting maxrecords fails + // or contains value we can't parse + public static final int DEFAULT_MAXHITS = 1000; + private static final String STATUS_SELECTED = "status"; private static final String ADD_ALL_SELECTED = "add-all"; private static final String REMOVE_ALL_SELECTED = "remove-all"; @@ -230,7 +235,15 @@ */ public void selectAll(String type, ServiceContext context) { Set selection = selections.get(type); + SettingInfo si = new SettingInfo(context); + int maxhits = DEFAULT_MAXHITS; + try { + maxhits = Integer.parseInt(si.getSelectionMaxRecords()); + } catch (Exception e) { + e.printStackTrace(); + } + if (selection != null) selection.clear(); @@ -240,28 +253,17 @@ if (searcher == null) return; - Element ht; + List uuidList; try { 
if (searcher instanceof LuceneSearcher) - ht = ((LuceneSearcher) searcher).getAll(); + uuidList = ((LuceneSearcher) searcher).getAllUuids(maxhits); else if (searcher instanceof CatalogSearcher) - ht = ((CatalogSearcher) searcher).getAll(); + uuidList = ((CatalogSearcher) searcher).getAllUuids(context, maxhits); else return; + + selection.addAll(uuidList); - List elList = ht.getChildren(); - - for (Iterator iter = elList.iterator(); iter.hasNext();) { - Element element = (Element) iter.next(); - Element info = element.getChild(Edit.RootChild.INFO, - Edit.NAMESPACE); - String UUID = info.getChildText(Edit.Info.Elem.UUID); - - if (UUID == null) - continue; - - selection.add(UUID); - } } catch (Exception e) { e.printStackTrace(); } Index: src/org/fao/geonet/kernel/search/SearchManager.java =================================================================== --- src/org/fao/geonet/kernel/search/SearchManager.java (revision 5897) +++ src/org/fao/geonet/kernel/search/SearchManager.java (working copy) @@ -42,6 +42,7 @@ import javax.naming.Context; import javax.naming.InitialContext; +import jeeves.exceptions.JeevesException; import jeeves.resources.dbms.Dbms; import jeeves.utils.Log; import jeeves.utils.Util; @@ -53,6 +54,8 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldSelector; +import org.apache.lucene.document.FieldSelectorResult; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; @@ -60,6 +63,7 @@ import org.apache.lucene.index.TermEnum; import org.apache.lucene.search.Filter; import org.apache.lucene.store.Directory; + import org.fao.geonet.constants.Geonet; import org.fao.geonet.csw.common.Csw; import org.fao.geonet.csw.common.exceptions.NoApplicableCodeEx; @@ -75,12 +79,15 @@ import org.fao.geonet.kernel.search.spatial.SpatialIndexWriter; import org.fao.geonet.kernel.search.spatial.TouchesFilter; import org.fao.geonet.kernel.search.spatial.WithinFilter; + +import org.geotools.data.DataStore; import org.geotools.data.DefaultTransaction; import org.geotools.data.FeatureSource; import org.geotools.data.Transaction; import org.geotools.gml3.GMLConfiguration; import org.geotools.xml.Configuration; import org.geotools.xml.Parser; + import org.jdom.Element; import com.k_int.IR.Searchable; @@ -115,15 +122,21 @@ private LoggingContext _cat; private Searchable _hssSearchable; private Spatial _spatial; + private LuceneIndexWriterFactory _indexWriter; + private Timer _timer; + // minutes between optimizations of the lucene index + private int _interval = 30; //----------------------------------------------------------------------------- /** * @param appPath * @param luceneDir + * @param summaryConfigXmlFile + * @param dataStore * @throws Exception */ - public SearchManager(String appPath, String luceneDir, String summaryConfigXmlFile) throws Exception + public SearchManager(String appPath, String luceneDir, String summaryConfigXmlFile, DataStore dataStore, String optimizerInterval) throws Exception { _summaryConfig = Xml.loadStream(new FileInputStream(new File(appPath,summaryConfigXmlFile))); _stylesheetsDir = new File(appPath, SEARCH_STYLESHEETS_DIR_PATH); @@ -134,12 +147,11 @@ _luceneDir = new File(luceneDir+ "/nonspatial"); - if (!_luceneDir.isAbsolute()) - _luceneDir = new File(appPath + luceneDir+ "/nonspatial"); + if (!_luceneDir.isAbsolute()) _luceneDir = new 
File(appPath + luceneDir+ "/nonspatial"); - _luceneDir.getParentFile().mkdirs(); + _luceneDir.getParentFile().mkdirs(); - _spatial = new Spatial(_luceneDir.getParent() + "/spatial"); + _spatial = new Spatial(dataStore); // Define the default Analyzer _analyzer = new PerFieldAnalyzerWrapper(new StandardAnalyzer()); @@ -164,9 +176,19 @@ _analyzer.addAnalyzer("parentUuid", new StandardAnalyzer()); _analyzer.addAnalyzer("operatesOn", new StandardAnalyzer()); _analyzer.addAnalyzer("subject", new KeywordAnalyzer()); - + initLucene(appPath, luceneDir); initZ3950(appPath); + + try { + _interval = Integer.parseInt(optimizerInterval); + } catch (Exception e) { + Log.error(Geonet.INDEX_ENGINE, "Cannot parse optimizer schedule from setting with value "+optimizerInterval+": "+e.getMessage()); + } + + _timer = new Timer(true); + _timer.schedule(new OptimizeTask(), 0, _interval * 60 * 1000); // do it now and then every _interval minutes + } //----------------------------------------------------------------------------- @@ -174,10 +196,38 @@ public void end() throws Exception { endZ3950(); + _timer.cancel(); } //----------------------------------------------------------------------------- + public synchronized void rescheduleOptimizer(int interval) throws Exception + { + if (interval == _interval) return; // do nothing unless interval changes + + Log.debug(Geonet.INDEX_ENGINE, "Resetting schedule for thread that optimizes lucene index from "+_interval+" interval to "+interval+" interval"); + _interval = interval; + _timer.cancel(); + _timer = new Timer(true); + _timer.schedule(new OptimizeTask(), _interval * 60 * 1000, _interval * 60 * 1000); // do it in interval minutes and then again every interval minutes + } + + //----------------------------------------------------------------------------- + + class OptimizeTask extends TimerTask { + public void run() { + try { + _indexWriter.openWriter(); + _indexWriter.optimize(); + _indexWriter.closeWriter(); + } catch (Exception e) { + Log.error(Geonet.INDEX_ENGINE, "Optimize task (running every "+_interval+" interval) failed: "+e.getMessage()); + } + } + } + + //----------------------------------------------------------------------------- + public MetaSearcher newSearcher(int type, String stylesheetName) throws Exception { @@ -200,9 +250,7 @@ private void initLucene(String appPath, String luceneDir) throws Exception { - //--- the lucene dir cannot be inside the CVS so it is better to create - // it here - setupIndex(false); // RGFIX: check if this is correct + setupIndex(false); } //----------------------------------------------------------------------------- @@ -284,10 +332,45 @@ * @param title * @throws Exception */ - public synchronized void index(String type, Element metadata, String id, - List moreFields, String isTemplate, String title) throws Exception + public void index(String type, Element metadata, String id, List moreFields, String isTemplate, String title) throws Exception { + Log.debug(Geonet.INDEX_ENGINE, "Opening Writer from index"); + _indexWriter.openWriter(); + try { + Document doc = buildIndexDocument(type, metadata, id, moreFields, isTemplate, title); + _indexWriter.addDocument(doc); + } finally { + Log.debug(Geonet.INDEX_ENGINE, "Closing Writer from index"); + _indexWriter.closeWriter(); + } + + _spatial.writer().index(_schemasDir.getPath(), type, id, metadata); + } + + public void startIndexGroup() throws Exception { + Log.debug(Geonet.INDEX_ENGINE, "Opening Writer from startIndexGroup"); + _indexWriter.openWriter(); + } + + public void 
indexGroup(String type, Element metadata, String id, List moreFields, String isTemplate, String title) throws Exception + { + Document doc = buildIndexDocument(type, metadata, id, moreFields, isTemplate, title); + _indexWriter.addDocument(doc); + + _spatial.writer().index(_schemasDir.getPath(), type, id, metadata); + } + + public void endIndexGroup() throws Exception { + Log.debug(Geonet.INDEX_ENGINE, "Closing Writer from endIndexGroup"); + _indexWriter.closeWriter(); + } + + private Document buildIndexDocument(String type, Element metadata, String id, List moreFields, String isTemplate, String title) throws Exception + { + + Log.debug(Geonet.INDEX_ENGINE, "Deleting "+id+" from index"); delete("_id", id); + Log.debug(Geonet.INDEX_ENGINE, "Finished Delete"); Element xmlDoc; @@ -322,16 +405,7 @@ + Xml.getString(xmlDoc)); Document doc = newDocument(xmlDoc); - - IndexWriter writer = new IndexWriter(_luceneDir, _analyzer, false); - try { - writer.addDocument(doc); - lazyOptimize(writer); - } finally { - writer.close(); - _indexReader = getIndexReader(); - } - _spatial.writer().index(_schemasDir.getPath(), type, id, metadata); + return doc; } /** @@ -378,20 +452,23 @@ //-------------------------------------------------------------------------------- // delete a document - public synchronized void delete(String fld, String txt) throws Exception { + public void delete(String fld, String txt) throws Exception { // possibly remove old document - IndexReader indexReader = IndexReader.open(_luceneDir); + Log.debug(Geonet.INDEX_ENGINE, "Opening Writer from delete"); + _indexWriter.openWriter(); try { - _spatial.writer().delete(txt); - indexReader.deleteDocuments(new Term(fld, txt)); + _indexWriter.deleteDocuments(new Term(fld, txt)); } finally { - indexReader.close(); + Log.debug(Geonet.INDEX_ENGINE, "Closing Writer from delete"); + _indexWriter.closeWriter(); } + _spatial.writer().delete(txt); } //-------------------------------------------------------------------------------- + // Dangerous or not at all possible if the index is very large! public Hashtable getDocs() throws Exception { _indexReader = getIndexReader(); @@ -417,6 +494,35 @@ //-------------------------------------------------------------------------------- + public HashMap getDocsChangeDate() throws Exception + { + _indexReader = getIndexReader(); + + FieldSelector idChangeDateSelector = new FieldSelector() { + public final FieldSelectorResult accept(String name) { + if (name.equals("_id") || name.equals("_changeDate")) return FieldSelectorResult.LOAD; + else return FieldSelectorResult.NO_LOAD; + } + }; + + int capacity = (int)(_indexReader.maxDoc() / 0.75)+1; + HashMap docs = new HashMap(capacity); + for (int i = 0; i < _indexReader.numDocs(); i++) { + if (_indexReader.isDeleted(i)) continue; // FIXME: strange lucene hack: sometimes it tries to load a deleted document + Hashtable record = new Hashtable(); + Document doc = _indexReader.document(i, idChangeDateSelector); + String id = doc.get("_id"); + if (id == null) { + Log.error(Geonet.INDEX_ENGINE, "Document with no _id field skipped! 
Document is "+doc); + continue; + } + docs.put(id, doc.get("_changeDate")); + } + return docs; + } + + //-------------------------------------------------------------------------------- + public Vector getTerms(String fld) throws Exception { Vector terms = new Vector(); @@ -443,7 +549,7 @@ .getAbsolutePath(); return Xml.transform(xml, styleSheet); } catch (Exception e) { - Log.error(Geonet.SEARCH_ENGINE, + Log.error(Geonet.INDEX_ENGINE, "Indexing stylesheet contains errors : " + e.getMessage()); throw e; } @@ -459,7 +565,7 @@ .getAbsolutePath(); return Xml.transform(xml, styleSheetPath); } catch (Exception e) { - Log.error(Geonet.SEARCH_ENGINE, + Log.error(Geonet.INDEX_ENGINE, "Search stylesheet contains errors : " + e.getMessage()); throw e; } @@ -504,24 +610,44 @@ indexReader.close(); badIndex = false; } catch (Exception e) { - Log.error(Geonet.SEARCH_ENGINE, + Log.error(Geonet.INDEX_ENGINE, "Exception while opening lucene index, going to rebuild it: " + e.getMessage()); } } // if rebuild forced or bad index then rebuild index if (rebuild || badIndex) { - Log.error(Geonet.SEARCH_ENGINE, "Rebuilding lucene index"); - if (_spatial != null) - _spatial.writer().reset(); + Log.error(Geonet.INDEX_ENGINE, "Rebuilding lucene index"); + if (_spatial != null) _spatial.writer().reset(); IndexWriter writer = new IndexWriter(_luceneDir, _analyzer, true); writer.close(); } - _indexReader = IndexReader.open(_luceneDir); + _indexReader = IndexReader.open(_luceneDir); + _indexWriter = new LuceneIndexWriterFactory(_luceneDir, _analyzer); } + //---------------------------------------------------------------------------- /* + * Optimize the Lucene index + * + */ + public boolean optimizeIndex() { + try { + _indexWriter.openWriter(); + _indexWriter.optimize(); + _indexWriter.closeWriter(); + return true; + } catch (Exception e) { + Log.error(Geonet.INDEX_ENGINE, + "Exception while optimizing lucene index: " + + e.getMessage()); + return false; + } + } + + //---------------------------------------------------------------------------- + /* * Rebuild the Lucene index * * @param dataMan @@ -530,11 +656,14 @@ */ public boolean rebuildIndex(DataManager dataMan, Dbms dbms) { try { + if (_indexWriter.isOpen()) { + throw new Exception("Cannot rebuild index while it is being updated - please wait till later"); + } setupIndex(true); dataMan.init(dbms, true); return true; } catch (Exception e) { - Log.error(Geonet.SEARCH_ENGINE, + Log.error(Geonet.INDEX_ENGINE, "Exception while rebuilding lucene index, going to rebuild it: " + e.getMessage()); return false; @@ -542,7 +671,6 @@ } - // creates a new document private Document newDocument(Element xml) { @@ -585,49 +713,6 @@ //-------------------------------------------------------------------------------- - private static final long TIME_BETWEEN_OPTS = 1000; // time between two optimizations in ms - private static final int UPDTATES_BETWEEN_OPTS = 10; // number of updates between two optimizations - - private long lastOptTime = 0; // time since last optimization - private int updateCount = UPDTATES_BETWEEN_OPTS - 1; // number of updates since last uptimization - private boolean optimizing = false; // true iff optimization is in progress - private Object mutex = new Object(); // RGFIX: check concurrent access from multiple servlets - /** - * lazy optimization: optimize index if - * at least TIME_BETWEEN_OPTS time passed or - * at least UPDTATES_BETWEEN_OPTS updates were performed - * since last optimization - * @param writer - * @throws Exception - */ - private void 
lazyOptimize(IndexWriter writer) - throws Exception - { - if (optimizing) return; - - boolean doOptimize; - synchronized (mutex) - { - if (System.currentTimeMillis() - lastOptTime < TIME_BETWEEN_OPTS - && ++updateCount < UPDTATES_BETWEEN_OPTS) - doOptimize = false; - else - { - doOptimize = true; - optimizing = true; - updateCount = 0; - } - } - if (doOptimize) - { - // System.out.println("**** OPTIMIZING"); // DEBUG - - writer.optimize(); - lastOptTime = System.currentTimeMillis(); - optimizing = false; - } - } - public Spatial getSpatial() { return _spatial; @@ -636,6 +721,7 @@ public class Spatial { + private final DataStore _datastore; private static final long TIME_BETWEEN_SPATIAL_COMMITS = 10000; private final Map> _types; { @@ -667,7 +753,6 @@ } _types = Collections.unmodifiableMap(types); } - private final String _appPath; private final Transaction _transaction; private final Timer _timer; private final Parser _gmlParser; @@ -675,23 +760,16 @@ private SpatialIndexWriter _writer; private Committer _committerTask; - public Spatial(String appPath) throws Exception + public Spatial(DataStore dataStore) throws Exception { _lock = new ReentrantLock(); - _appPath = appPath; + _datastore = dataStore; _transaction = new DefaultTransaction("SpatialIndexWriter"); _timer = new Timer(true); _gmlParser = new Parser(new GMLConfiguration()); boolean rebuildIndex = false; - // This must be before createWriter because createWriter will create - // the file - // and therefore the test will not be worthwhile - if (!SpatialIndexWriter.createDataStoreFile(appPath).exists()) { - Log.error(Geonet.SEARCH_ENGINE, "Rebuild index because spatial index does not exist."); - rebuildIndex = true; - } - rebuildIndex = createWriter(appPath); + rebuildIndex = createWriter(_datastore); if (rebuildIndex) { setupIndex(true); }else{ @@ -703,16 +781,21 @@ addShutdownHook(); } - private boolean createWriter(String appPath) throws IOException + private boolean createWriter(DataStore datastore) throws IOException { boolean rebuildIndex; try { - _writer = new SpatialIndexWriter(appPath, _gmlParser, + _writer = new SpatialIndexWriter(datastore, _gmlParser, _transaction, _lock); rebuildIndex = _writer.getFeatureSource().getSchema() == null; } catch (Exception e) { - if (_writer != null) - _writer.delete(); + String exceptionString = Xml.getString(JeevesException.toElement(e)); + Log.warning(Geonet.SPATIAL, "Failure to make _writer, maybe a problem but might also not be an issue:"+exceptionString); + try { + _writer.reset(); + } catch (Exception e1) { + Log.error(Geonet.SPATIAL, "Unable to call reset on Spatial writer"); + } rebuildIndex = true; } return rebuildIndex; @@ -792,7 +875,7 @@ private SpatialIndexWriter writerNoLocking() throws Exception { if (_writer == null) { - _writer = new SpatialIndexWriter(_appPath, _gmlParser, + _writer = new SpatialIndexWriter(_datastore, _gmlParser, _transaction, _lock); } return _writer; Index: src/org/fao/geonet/kernel/search/LuceneSearcher.java =================================================================== --- src/org/fao/geonet/kernel/search/LuceneSearcher.java (revision 5897) +++ src/org/fao/geonet/kernel/search/LuceneSearcher.java (working copy) @@ -44,6 +44,9 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldSelector; +import org.apache.lucene.document.FieldSelectorResult; +import org.apache.lucene.document.MapFieldSelector; import org.apache.lucene.index.CorruptIndexException; import 
org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; @@ -52,7 +55,6 @@ import org.apache.lucene.search.CachingWrapperFilter; import org.apache.lucene.search.Filter; import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.Hits; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.PhraseQuery; @@ -60,10 +62,15 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.RangeQuery; import org.apache.lucene.search.Searcher; +import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.WildcardQuery; + import org.fao.geonet.GeonetContext; import org.fao.geonet.constants.Edit; import org.fao.geonet.constants.Geonet; @@ -71,6 +78,7 @@ import org.fao.geonet.kernel.search.SummaryComparator.Type; import org.fao.geonet.kernel.search.spatial.Pair; import org.fao.geonet.lib.Lib; + import org.jdom.Content; import org.jdom.Element; @@ -88,18 +96,18 @@ private Element _summaryConfig; private IndexReader _reader; - private Searcher _searcher; private Query _query; private Filter _filter; private Sort _sort; - private Hits _hits; - private Hits _lastHits; + private TopFieldCollector _hits; private Element _elSummary; + private FieldSelector _selector; private int _maxSummaryKeys; + private int _maxHitsInSummary; private int _numHits; private String _resultType; - private String _language; + private String _language; //-------------------------------------------------------------------------------- // constructor @@ -108,6 +116,12 @@ _sm = sm; _styleSheetName = styleSheetName; _summaryConfig = summaryConfig; + _selector = new FieldSelector() { + public final FieldSelectorResult accept(String name) { + if (name.equals("_id")) return FieldSelectorResult.LOAD; + else return FieldSelectorResult.NO_LOAD; + } + }; } //-------------------------------------------------------------------------------- @@ -118,13 +132,11 @@ { try { computeQuery(srvContext, request, config); - performQuery(request, srvContext!=null?true:false); initSearchRange(srvContext); + performQuery(getTo()); } finally { _hits = null; - _searcher.close(); - _searcher = null; setValid(false); } } @@ -138,11 +150,10 @@ public Element present(ServiceContext srvContext, Element request, ServiceConfig config) throws Exception { - if (!isValid()) - performQuery(request, srvContext!=null?true:false); - updateSearchRange(request); + if (!isValid()) performQuery(getTo()); // get enough hits to show a page + GeonetContext gc = null; if (srvContext != null) gc = (GeonetContext) srvContext.getHandlerContext(Geonet.CONTEXT_NAME); @@ -162,44 +173,47 @@ response.addContent((Element)_elSummary.clone()); - if (getTo() > 0) - { - for(int i = getFrom() - 1; i < getTo(); i++) - { - Document doc = _hits.doc(i); - String id = doc.get("_id"); - Element md = new Element ("md"); + if (getTo() > 0) { + TopDocs tdocs = _hits.topDocs(getFrom()-1, getTo()); + int nrHits = getTo() - (getFrom()-1); + if (tdocs.scoreDocs.length >= nrHits) { + for (int i = 0; i < nrHits; i++) { + Document doc = null; + if (fast) { + doc = _reader.document(tdocs.scoreDocs[i].doc); // no selector + } else { + doc = _reader.document(tdocs.scoreDocs[i].doc, _selector); + } + String id = 
doc.get("_id"); + Element md = new Element ("md"); - if (fast) - { - md = getMetadataFromIndex(doc, id); - } - else if (srvContext != null) - { - md = gc.getDataManager().getMetadata(srvContext, id, false); - } else { - md = null; - } + if (fast) { + md = getMetadataFromIndex(doc, id); + } else if (srvContext != null) { + md = gc.getDataManager().getMetadata(srvContext, id, false); + } else { + md = null; + } - //--- the search result is buffered so a metadata could have been deleted - //--- just before showing search results + //--- a metadata could have been deleted just before showing + //--- search results - if (md != null) - { - // Calculate score and add it to info elem - Float score = _hits.score(i); - Element info = md.getChild (Edit.RootChild.INFO, Edit.NAMESPACE); - addElement(info, Edit.Info.Elem.SCORE, score.toString()); - - response.addContent(md); + if (md != null) { + // Calculate score and add it to info elem + Float score = tdocs.scoreDocs[i].score; + Element info = md.getChild (Edit.RootChild.INFO, Edit.NAMESPACE); + addElement(info, Edit.Info.Elem.SCORE, score.toString()); + + response.addContent(md); + } } + } else { + throw new Exception("Failed: Not enough search results ("+tdocs.scoreDocs.length+") available to meet request for "+nrHits+"."); } } _hits = null; - _searcher.close(); - _searcher = null; - setValid(false); + setValid(false); return response; } @@ -221,20 +235,18 @@ } //-------------------------------------------------------------------------------- - // RGFIX: check this public void close() { - try - { - if (_searcher != null) { - _searcher.close(); - _searcher = null; - } - setValid(false); + try { + if (_reader != null) { + _reader.close(); + _reader = null; + } + setValid(false); } catch (IOException e) { e.printStackTrace(); - } // DEBUG + } } //-------------------------------------------------------------------------------- @@ -242,6 +254,11 @@ private void computeQuery(ServiceContext srvContext, Element request, ServiceConfig config) throws Exception { + + String sMaxRecordsInKeywordSummary = request.getChildText("maxHitsInSummary"); + if (sMaxRecordsInKeywordSummary == null) sMaxRecordsInKeywordSummary = config.getValue("maxHitsInSummary", "1000"); + _maxHitsInSummary = Integer.parseInt(sMaxRecordsInKeywordSummary); + String sMaxSummaryKeys = request.getChildText("maxSummaryKeys"); if (sMaxSummaryKeys == null) sMaxSummaryKeys = config.getValue("maxSummaryKeys", "10"); _maxSummaryKeys = Integer.parseInt(sMaxSummaryKeys); @@ -315,6 +332,7 @@ } } + //-------------------------------------------------------------------------------- /** * Execute Lucene query with sorting option. * @@ -322,31 +340,34 @@ * Default sort order option is not reverse order. 
Reverse order is active * if sort order option is set and not null */ - private void performQuery(Element request, boolean keepSearch) throws Exception + private void performQuery(int maxNumberOfHits) throws Exception { - _reader = _sm.getIndexReader(); - _searcher = new IndexSearcher(_reader); - - if (_filter == null) { - _hits = _searcher.search(_query, _sort); + _reader = IndexReader.open(_sm.getLuceneDir()); + + CachingWrapperFilter cFilter = null; + if (_filter != null) cFilter = new CachingWrapperFilter(_filter); + + int numHits; + boolean buildSummary = _elSummary == null; + if (buildSummary) { + // get as many results as instructed or enough for search summary + numHits = Math.max(_maxHitsInSummary,maxNumberOfHits); } else { - _hits = _searcher.search(_query, new CachingWrapperFilter(_filter), _sort); + numHits = maxNumberOfHits; } - _numHits = _hits.length(); + Pair results = doSearchAndMakeSummary( numHits, _maxSummaryKeys, _language, _resultType, _summaryConfig, _reader, _query, cFilter, _sort, buildSummary); + _hits = results.one(); + _elSummary = results.two(); + + _numHits = Integer.parseInt(_elSummary.getAttributeValue("count")); + Log.debug(Geonet.SEARCH_ENGINE, "Hits found : "+_numHits+""); - if (keepSearch) - _lastHits = _hits; + setValid(true); + } - Log.debug(Geonet.SEARCH_ENGINE, "Hits found : "+ _hits.length()); + //-------------------------------------------------------------------------------- - if (_elSummary == null) { - _elSummary = makeSummary(_hits, getSize(), _summaryConfig, _resultType, _maxSummaryKeys, _language); - } - - setValid(true); - } - private Geometry getGeometry(Element request) throws Exception { String geomWKT = Util.getParam(request, Geonet.SearchResult.GEOMETRY, null); @@ -357,6 +378,7 @@ return null; } + //-------------------------------------------------------------------------------- // Create the Sort to use in the search public static Sort makeSort(List> fields) { @@ -374,6 +396,7 @@ return sort; } + //-------------------------------------------------------------------------------- /** * Define sort field. By default, the field is assumed to be a string. * Only popularity and rating are sorted based on integer type. 
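For reference, the rewritten performQuery() above and the new doSearchAndMakeSummary() in the following hunk both rely on the Lucene 2.9 collector API that replaces the removed Hits class. The minimal sketch below is not part of this patch; luceneDir, query, filter, sort and numHits are illustrative placeholders. It shows the pattern the patch adopts: create a TopFieldCollector, run the search, then page through the collected ScoreDocs and load only the fields that are needed via a FieldSelector.

    import java.io.File;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.FieldSelector;
    import org.apache.lucene.document.FieldSelectorResult;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.*;

    public class CollectorSketch {
        public static void run(File luceneDir, Query query, Filter filter, Sort sort, int numHits) throws Exception {
            IndexReader reader = IndexReader.open(luceneDir);
            try {
                // fillFields=true so sort-field values are recorded; document scores and max score are not tracked here
                TopFieldCollector collector = TopFieldCollector.create(sort, numHits, true, false, false, false);
                new IndexSearcher(reader).search(query, filter, collector);

                int totalHits = collector.getTotalHits();      // all matches, may exceed numHits
                TopDocs tdocs = collector.topDocs(0, numHits); // the sorted page that was actually collected

                // Load only the "_id" field of each hit, mirroring the FieldSelector used in this class.
                FieldSelector idOnly = new FieldSelector() {
                    public FieldSelectorResult accept(String fieldName) {
                        return "_id".equals(fieldName) ? FieldSelectorResult.LOAD : FieldSelectorResult.NO_LOAD;
                    }
                };
                for (ScoreDoc sdoc : tdocs.scoreDocs) {
                    Document doc = reader.document(sdoc.doc, idOnly);
                    System.out.println(doc.get("_id") + " of " + totalHits + " hits, score=" + sdoc.score);
                }
            } finally {
                reader.close();
            }
        }
    }

When a summary must be built, the patch asks the collector for Math.max(_maxHitsInSummary, maxNumberOfHits) documents, so a single search supplies both the requested result page and enough hits for the frequency counts assembled in buildSummaryMaps().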
@@ -509,89 +532,181 @@ //-------------------------------------------------------------------------------- - public static Element makeSummary(Hits hits, int count, Element summaryConfig, String resultType, int maxSummaryKeys, String langCode) throws Exception - { - Element elSummary = new Element("summary"); + private static HashMap> getSummaryConfig(Element summaryConfig, String resultType, int maxSummaryKeys) throws Exception { - elSummary.setAttribute("count", count+""); - elSummary.setAttribute("type", "local"); - + HashMap> results = new HashMap>(); + Element resultTypeConfig = summaryConfig.getChild("def").getChild(resultType); List elements = resultTypeConfig.getChildren(); - + for (Element summaryElement : elements) { - String name = summaryElement.getAttributeValue("name"); - String plural = summaryElement.getAttributeValue("plural"); - String key = summaryElement.getAttributeValue("indexKey"); - String order = summaryElement.getAttributeValue("order"); - String maxString = summaryElement.getAttributeValue("max"); - String type = summaryElement.getAttributeValue("type"); - if (order == null) { - order = "frequency"; - } - int max; - if (maxString == null) { - max = 10; - } else { - max = Integer.parseInt(maxString); - } - max = Math.min(maxSummaryKeys, max); - if( type==null ){ - type = "string"; - } + String name = summaryElement.getAttributeValue("name"); + String plural = summaryElement.getAttributeValue("plural"); + String key = summaryElement.getAttributeValue("indexKey"); + String order = summaryElement.getAttributeValue("order"); + String maxString = summaryElement.getAttributeValue("max"); + String type = summaryElement.getAttributeValue("type"); + if (order == null) { + order = "frequency"; + } + int max; + if (maxString == null) { + max = 10; + } else { + max = Integer.parseInt(maxString); + } + max = Math.min(maxSummaryKeys, max); + if( type==null ){ + type = "string"; + } - SortOption sortOption = SortOption.parse(order); - SummaryComparator summaryComparator = new SummaryComparator(sortOption, Type.parse(type), langCode, summaryConfig.getChild("typeConfig")); - summarize(elSummary, hits, key, count, plural, name, summaryComparator, max); - } - return elSummary; + HashMap values = new HashMap(); + values.put("name", name); + values.put("plural", plural); + values.put("max", new Integer(max)); + values.put("order", order); + values.put("type", type); + values.put("typeConfig", summaryConfig.getChild("typeConfig")); + results.put(key,values); + } + + return results; } + + //-------------------------------------------------------------------------------- + + private static SummaryComparator getSummaryComparator(String key, String langCode, HashMap summaryConfigValuesForKey) throws Exception + { + + SortOption sortOption = SortOption.parse((String)summaryConfigValuesForKey.get("order")); + return new SummaryComparator(sortOption, Type.parse((String)summaryConfigValuesForKey.get("type")), langCode, (Element)summaryConfigValuesForKey.get("typeConfig")); + } + + //-------------------------------------------------------------------------------- + + private static HashMap> prepareSummaryMaps(Set indexKeys) throws Exception + { + HashMap> summaryMaps = new HashMap>(); + for (String key : indexKeys) { + summaryMaps.put(key, new HashMap()); + } + return summaryMaps; + } + + //-------------------------------------------------------------------------------- + + private static HashMap> buildSummaryMaps(Element elSummary, IndexReader reader, ScoreDoc[] sdocs, final HashMap> 
summaryMaps) { + elSummary.setAttribute("hitsusedforsummary", sdocs.length+""); + + FieldSelector keySelector = new FieldSelector() { + public final FieldSelectorResult accept(String name) { + if (summaryMaps.get(name) != null) return FieldSelectorResult.LOAD; + else return FieldSelectorResult.NO_LOAD; + } + }; + + for ( int i = 0; i < sdocs.length; i++) { + ScoreDoc sdoc = sdocs[i]; + Document doc = null; + try { + doc = reader.document(sdoc.doc, keySelector); + } catch (Exception e) { + Log.error(Geonet.SEARCH_ENGINE, e.getMessage()+" Caused Failure to get document "+sdoc.doc); + e.printStackTrace(); + } + + for ( String key : summaryMaps.keySet() ) { + HashMap summary = summaryMaps.get(key); + String hits[] = doc.getValues(key); + if (hits != null) { + for (int j = 0; j < hits.length; j++) { + String info = hits[j]; + Integer catCount = (Integer) summary.get(info); + if (catCount == null) { + catCount = new Integer(1); + } else { + catCount = new Integer(catCount.intValue() + 1); + } + summary.put(info, catCount); + } + } + } + } + + return summaryMaps; + } + + //-------------------------------------------------------------------------------- + + private static Element addSortedSummaryKeys(Element elSummary, String langCode, HashMap> summaryMaps, HashMap> summaryConfigValues) throws Exception { - private static void summarize(Element elSummary, Hits phits, String indexKey, int count, String rootElemName, String elemName, - SummaryComparator summaryComparator, int max) throws CorruptIndexException, IOException - { + for ( String indexKey : summaryMaps.keySet() ) { + HashMap summaryConfigValuesForKey = summaryConfigValues.get(indexKey); + Element rootElem = new Element((String)summaryConfigValuesForKey.get("plural")); + // sort according to frequency + SummaryComparator summaryComparator = getSummaryComparator(indexKey, langCode, summaryConfigValuesForKey); + HashMap summary = summaryMaps.get(indexKey); + Log.debug(Geonet.SEARCH_ENGINE, "Sorting "+summary.size()+" according to frequency of " + indexKey); - HashMap summary = new HashMap(); - for (int i = 0; i < count; i++) { - Document doc = phits.doc(i); - String hits[] = doc.getValues(indexKey); - if (hits != null) // if there are no categories lucene returns null - // instead of an empty array - for (int j = 0; j < hits.length; j++) { - String info = hits[j]; - Integer catCount = (Integer) summary.get(info); - if (catCount == null) - catCount = new Integer(1); - else - catCount = new Integer(catCount.intValue() + 1); - summary.put(info, catCount); - } - } + TreeSet> sortedSummary = new TreeSet>(summaryComparator); + sortedSummary.addAll(summary.entrySet()); - Element rootElem = new Element(rootElemName); - // sort according to frequency - TreeSet> sortedSummary = new TreeSet>(summaryComparator); - sortedSummary.addAll(summary.entrySet()); + Integer max = (Integer)summaryConfigValuesForKey.get("max"); - int nKeys = 0; - for (Iterator iter = sortedSummary.iterator(); iter.hasNext();) { - if (++nKeys > max) - break; + int nKeys = 0; + for (Iterator iter = sortedSummary.iterator(); iter.hasNext();) { + if (++nKeys > max.intValue()) break; - Map.Entry me = (Map.Entry) iter.next(); - String keyword = (String) me.getKey(); - Integer keyCount = (Integer) me.getValue(); + Map.Entry me = (Map.Entry) iter.next(); + String keyword = (String) me.getKey(); + Integer keyCount = (Integer) me.getValue(); - Element childElem = new Element(elemName); - childElem.setAttribute("count", keyCount.toString()); - childElem.setAttribute("name", keyword); - 
rootElem.addContent(childElem); - } - elSummary.addContent(rootElem); - } + Element childElem = new Element((String)summaryConfigValuesForKey.get("name")); + childElem.setAttribute("count", keyCount.toString()); + childElem.setAttribute("name", keyword); + rootElem.addContent(childElem); + } + elSummary.addContent(rootElem); + } + return elSummary; + } + //-------------------------------------------------------------------------------- + public static Pair doSearchAndMakeSummary(int numHits, int maxSummaryKeys, String langCode, String resultType, Element summaryConfig, IndexReader reader, Query query, CachingWrapperFilter cFilter, Sort sort, boolean buildSummary) throws Exception + { + + Log.debug(Geonet.SEARCH_ENGINE, "Setting up the TFC with numHits "+numHits); + TopFieldCollector tfc = TopFieldCollector.create(sort, numHits, true, false, false, false); + + new IndexSearcher(reader).search(query, cFilter, tfc); + + Element elSummary= new Element("summary"); + elSummary.setAttribute("count", tfc.getTotalHits()+""); + elSummary.setAttribute("type", "local"); + + if (buildSummary) { + Log.debug(Geonet.SEARCH_ENGINE, "building summary"); + + // -- prepare + HashMap> summaryConfigValues = getSummaryConfig(summaryConfig, resultType, maxSummaryKeys); + HashMap> summaryMaps = prepareSummaryMaps(summaryConfigValues.keySet()); + + // -- get topdocs from search + TopDocs tdocs = tfc.topDocs(0, numHits); + + // -- add summary keys to summary element + summaryMaps = buildSummaryMaps(elSummary, reader, tdocs.scoreDocs, summaryMaps); + elSummary = addSortedSummaryKeys(elSummary, langCode, summaryMaps, summaryConfigValues); + } + + + return Pair.read(tfc,elSummary); + } + + //-------------------------------------------------------------------------------- + public static Element getMetadataFromIndex(Document doc, String id) { String root = doc.get("_root"); @@ -624,10 +739,10 @@ } //-------------------------------------------------------------------------------- - /** *

* Gets results in current searcher + * WARNING: will use lots of memory for large result sets! *

* * @return current searcher result in "fast" mode @@ -635,29 +750,71 @@ * @throws IOException * @throws CorruptIndexException */ - public Element getAll() throws CorruptIndexException, IOException { + public Element getAll(int maxHits) throws Exception { + performQuery(maxHits); + Element response = new Element("response"); - if (_lastHits == null || _lastHits.length() == 0) { + if (_hits == null || _hits.getTotalHits() == 0) { response.setAttribute("from", 0 + ""); response.setAttribute("to", 0 + ""); return response; } + TopDocs tdocs = _hits.topDocs(0, maxHits); response.setAttribute("from", 1 + ""); - response.setAttribute("to", _lastHits.length() + ""); + response.setAttribute("to", tdocs.scoreDocs.length + ""); - for (int i = 0; i < _lastHits.length(); i++) { - Document doc = _lastHits.doc(i); + for ( ScoreDoc sdoc : tdocs.scoreDocs ) { + Document doc = _reader.document(sdoc.doc); // no selector here!; String id = doc.get("_id"); // FAST mode Element md = getMetadataFromIndex(doc, id); response.addContent(md); } + + setValid(false); return response; } + //-------------------------------------------------------------------------------- /** + *

+ * Gets all metadata UUIDs in the current searcher + *

+ * + * @return current searcher result in "fast" mode + * + * @throws IOException + * @throws CorruptIndexException + */ + public List getAllUuids(int maxHits) throws Exception { + + FieldSelector uuidselector = new FieldSelector() { + public final FieldSelectorResult accept(String name) { + if (name.equals("_uuid")) return FieldSelectorResult.LOAD; + else return FieldSelectorResult.NO_LOAD; + } + }; + + performQuery(maxHits); + + List response = new ArrayList(); + + TopDocs tdocs = _hits.topDocs(0, maxHits); + + for ( ScoreDoc sdoc : tdocs.scoreDocs ) { + Document doc = _reader.document(sdoc.doc, uuidselector); + String uuid = doc.get("_uuid"); + if (uuid != null) response.add(uuid); + } + + setValid(false); + return response; + } + + //-------------------------------------------------------------------------------- + /** * Search in Lucene index and return Lucene index * field value. Metadata records is retrieved * based on its uuid. @@ -669,30 +826,32 @@ */ public static String getMetadataFromIndex(String appPath, String id, String fieldname, String languageCode) throws Exception { - String value = ""; - File luceneDir = new File(appPath + "WEB-INF/lucene/nonspatial"); - IndexReader reader = IndexReader.open(luceneDir); - Searcher searcher = new IndexSearcher(reader); + List fieldnames = new ArrayList(); + fieldnames.add(fieldname); + return getMetadataFromIndex(appPath, id, fieldnames, languageCode).get(fieldname); + } + + public static Map getMetadataFromIndex(String appPath, String id, List fieldnames, String languageCode) throws Exception + { + + MapFieldSelector selector = new MapFieldSelector(fieldnames); + + File luceneDir = new File(appPath + "WEB-INF/lucene/nonspatial"); + IndexReader reader = IndexReader.open(luceneDir); + Searcher searcher = new IndexSearcher(reader); + + Map values = new HashMap(); try { - TermQuery query = new TermQuery(new Term("_uuid", id)); - Hits hits = searcher.search(query); + TermQuery query = new TermQuery(new Term("_uuid", id)); + TopDocs tdocs = searcher.search(query,1); - for (int j=0; j fields = doc.getFields(); - - for (Iterator i = fields.iterator(); i.hasNext();) { - Field field = i.next(); - String name = field.name(); - - if (name.equals(fieldname)) { - value = field.stringValue(); - break; - // TODO : handle multiple fields ? - } + for ( String fieldname : fieldnames ) { + values.put(fieldname, doc.get(fieldname)); } } @@ -709,8 +868,8 @@ reader.close(); } - return value; - } + return values; + } } //============================================================================== Index: src/org/fao/geonet/kernel/search/LuceneIndexWriterFactory.java =================================================================== --- src/org/fao/geonet/kernel/search/LuceneIndexWriterFactory.java (revision 0) +++ src/org/fao/geonet/kernel/search/LuceneIndexWriterFactory.java (revision 0) @@ -0,0 +1,90 @@ +package org.fao.geonet.kernel.search; + +import java.io.File; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import jeeves.utils.Log; + +import org.apache.lucene.document.Document; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.analysis.PerFieldAnalyzerWrapper; + +import org.fao.geonet.constants.Geonet; + +/* Lucene only allows one IndexWriter to be open at a time. + However, multiple threads can use this single IndexWriter. 
+ This class manages a global IndexWriter and uses reference counting to + determine when it can be closed. */ + +public class LuceneIndexWriterFactory { + + protected IndexWriter _writer; + protected int _count; + private File _luceneDir; + private PerFieldAnalyzerWrapper _analyzer; + + // true iff optimization is in progress + private boolean _optimizing = false; + private Object _mutex = new Object(); + + + public LuceneIndexWriterFactory(File luceneDir, PerFieldAnalyzerWrapper analyzer) { + _luceneDir = luceneDir; + _analyzer = analyzer; + } + + public synchronized void openWriter() throws Exception { + if (_count == 0) { + _writer = new IndexWriter(_luceneDir, _analyzer, false); + _writer.setRAMBufferSizeMB(48.0d); + // 48MB seems to be plenty for running at least two long + // indexing jobs (eg. importing 20,000 records) and keeping disk + // activity for lucene index writing to a minimum - should be a config + // option + } + _count++; + Log.info(Geonet.INDEX_ENGINE, "Opening Index_writer, ref count "+_count+" ram in use "+_writer.ramSizeInBytes()+" docs buffered "+_writer.numRamDocs()); + } + + public synchronized boolean isOpen() { + if (_count > 0) return true; + else return false; + } + + public synchronized void closeWriter() throws Exception { + + // lower reference count, close if count reaches zero + if (_count > 0) { + _count--; + Log.info(Geonet.INDEX_ENGINE, "Closing Index_writer, ref _count "+_count+" ram in use "+_writer.ramSizeInBytes()+" docs buffered "+_writer.numRamDocs()); + if (_count==0) _writer.close(); + } + } + + public void addDocument(Document doc) throws Exception { + _writer.addDocument(doc); + } + + public void deleteDocuments(Term term) throws Exception { + _writer.deleteDocuments(term); + } + + public void optimize() throws Exception { + if (_optimizing) return; + synchronized (_mutex) { + _optimizing = true; + Log.debug(Geonet.INDEX_ENGINE,"Optimizing the Lucene Index..."); + _writer.optimize(); + Log.debug(Geonet.INDEX_ENGINE,"Optimizing Done."); + _optimizing = false; + } + return; + } + + + +} Index: src/org/fao/geonet/kernel/search/spatial/SpatialIndexWriter.java =================================================================== --- src/org/fao/geonet/kernel/search/spatial/SpatialIndexWriter.java (revision 5897) +++ src/org/fao/geonet/kernel/search/spatial/SpatialIndexWriter.java (working copy) @@ -26,46 +26,37 @@ import java.io.File; import java.io.IOException; import java.io.StringReader; -import java.net.URI; -import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Level; +import javax.xml.parsers.ParserConfigurationException; + import jeeves.utils.Log; import jeeves.utils.Xml; -import org.geotools.data.DefaultTransaction; +import org.fao.geonet.constants.Geonet; +import org.geotools.data.DataStore; import org.geotools.data.FeatureSource; import org.geotools.data.FeatureStore; import org.geotools.data.Transaction; -import org.geotools.data.shapefile.indexed.IndexType; -import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; import org.geotools.factory.CommonFactoryFinder; import org.geotools.factory.GeoTools; -import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.FeatureCollection; import org.geotools.feature.FeatureCollections; import org.geotools.feature.FeatureIterator; -import 
org.geotools.feature.SchemaException; import org.geotools.feature.simple.SimpleFeatureBuilder; -import org.geotools.feature.simple.SimpleFeatureTypeBuilder; -import org.geotools.referencing.CRS; -import org.geotools.referencing.crs.DefaultGeographicCRS; -import org.geotools.xml.Configuration; import org.geotools.xml.Parser; import org.jdom.Element; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; -import org.opengis.feature.type.AttributeDescriptor; import org.opengis.filter.Filter; import org.opengis.filter.FilterFactory2; import org.opengis.filter.identity.FeatureId; -import org.opengis.referencing.crs.CoordinateReferenceSystem; +import org.xml.sax.SAXException; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; @@ -77,41 +68,26 @@ /** * This class is responsible for extracting geographic information from metadata * and writing that information to a storage mechanism. - * + * * @author jeichar */ +@SuppressWarnings("unchecked") public class SpatialIndexWriter { static final String IDS_ATTRIBUTE_NAME = "id"; static final String GEOM_ATTRIBUTE_NAME = "the_geom"; - static final String SPATIAL_INDEX_FILENAME = "spatialIndex"; + static final String SPATIAL_INDEX_TYPENAME = "spatialindex"; private static final int MAX_WRITES_IN_TRANSACTION = 5000; private final Parser _parser; private final Transaction _transaction; - private final SimpleFeatureType _schema; - private final File _file; private final Lock _lock; - private FeatureStore _featureStore; + private FeatureStore _featureStore; private STRtree _index; private static int _writes; - public SpatialIndexWriter(String indexBasedir, Configuration config) - throws Exception - { - this(indexBasedir, new Parser(config), - new DefaultTransaction("SpatialIndexWriter"), - new ReentrantLock()); - } - - public SpatialIndexWriter(String indexBasedir, Configuration config, - Transaction transaction) throws Exception - { - this(indexBasedir, new Parser(config), transaction, new ReentrantLock()); - } - - public SpatialIndexWriter(String indexBasedir, Parser parser, + public SpatialIndexWriter(DataStore datastore, Parser parser, Transaction transaction, Lock lock) throws Exception { // Note: The Configuration takes a long time to create so it is worth @@ -120,18 +96,16 @@ _parser = parser; _parser.setStrict(false); _parser.setValidating(false); - _file = createDataStoreFile(indexBasedir); _transaction = transaction; - _schema = createSchema(); - _featureStore = createFeatureStore(); + _featureStore = createFeatureStore(datastore); _featureStore.setTransaction(_transaction); } /** * Add a metadata record to the index - * + * * @param schemasDir * the base directory that contains the different metadata * schemas @@ -146,16 +120,21 @@ _lock.lock(); try { _index = null; - Geometry[] extractGeometriesFrom = extractGeometriesFrom( + Geometry geometry = extractGeometriesFrom( schemasDir, type, metadata, _parser); - if (extractGeometriesFrom.length > 0) { + if (geometry != null) { FeatureCollection features = FeatureCollections.newCollection(); - for (Geometry geometry : extractGeometriesFrom) { - Object[] data = { geometry, id }; - features.add(SimpleFeatureBuilder.build(_schema, data, - SimpleFeatureBuilder.createDefaultFeatureId())); + Object[] data; + SimpleFeatureType schema = _featureStore.getSchema(); + if(schema.getDescriptor(0) == schema.getGeometryDescriptor()){ + data = new Object[] { geometry, id }; + } else { + data = new Object[] { id, geometry}; } + 
+ features.add(SimpleFeatureBuilder.build(schema, data, + SimpleFeatureBuilder.createDefaultFeatureId())); _featureStore.addFeatures(features); @@ -187,20 +166,6 @@ } } - public void delete() throws IOException - { - _lock.lock(); - try { - if (_featureStore.getTransaction() != Transaction.AUTO_COMMIT) { - close(); - } - _index = null; - delete(_file.getParentFile()); - } finally { - _lock.unlock(); - } - } - public FeatureSource getFeatureSource() { return _featureStore; @@ -249,7 +214,7 @@ if (_index==null) { populateIndex(); } - return _index; + return _index; } finally { _lock.unlock(); } @@ -261,13 +226,11 @@ */ public void reset() throws Exception { - _lock.lock(); try { _featureStore.setTransaction(Transaction.AUTO_COMMIT); _index=null; - delete(_file); - _featureStore=createFeatureStore(); + _featureStore.removeFeatures(Filter.INCLUDE); _featureStore.setTransaction(_transaction); }finally{ _lock.unlock(); @@ -278,10 +241,9 @@ * Extracts a Geometry Collection from metadata default visibility for * testing access. */ - static MultiPolygon[] extractGeometriesFrom(String schemasDir, String type, + static MultiPolygon extractGeometriesFrom(String schemasDir, String type, Element metadata, Parser parser) throws Exception { - org.geotools.util.logging.Logging.getLogger("org.geotools.xml") .setLevel(Level.SEVERE); File schemaDir = new File(schemasDir, type); @@ -289,41 +251,66 @@ .getAbsolutePath(); Element transform = Xml.transform(metadata, sSheet); if (transform.getChildren().size() == 0) { - return new MultiPolygon[0]; + return null; } - String gml = Xml.getString(transform); + List allPolygons = new ArrayList(); + for (Element geom : (List)transform.getChildren()) { + String gml = Xml.getString(geom); - try { - Object value = parser.parse(new StringReader(gml)); - if (value instanceof HashMap) { - HashMap map = (HashMap) value; - List geoms = new ArrayList(); - for (Object entry : map.values()) { - addToList(geoms, entry); + try { + MultiPolygon jts = parseGml(parser, gml); + for (int i = 0; i < jts.getNumGeometries(); i++) { + allPolygons.add((Polygon) jts.getGeometryN(i)); } - if( geoms.isEmpty() ){ - return new MultiPolygon[0]; - } else if( geoms.size()>1 ){ - GeometryFactory factory = geoms.get(0).getFactory(); - return new MultiPolygon[]{factory.createMultiPolygon(geoms.toArray(new Polygon[0]))}; - } else { - return new MultiPolygon[]{toMultiPolygon(geoms.get(0))}; - } + } catch (Exception e) { + Log.error(Geonet.INDEX_ENGINE, "Failed to convert gml to jts object: "+gml+"\n\t"+e.getMessage()); + e.printStackTrace(); + // continue + } + } - } else if (value == null) { - return new MultiPolygon[0]; + if( allPolygons.isEmpty()){ + return null; + }else{ + try { + Polygon[] array = new Polygon[allPolygons.size()]; + GeometryFactory geometryFactory = allPolygons.get(0).getFactory(); + return geometryFactory.createMultiPolygon(allPolygons.toArray(array)); + } catch (Exception e) { + Log.error(Geonet.INDEX_ENGINE, "Failed to create a MultiPolygon from: "+allPolygons); + // continue + return null; + } + } + } + + public static MultiPolygon parseGml(Parser parser, String gml) throws IOException, SAXException, + ParserConfigurationException + { + Object value = parser.parse(new StringReader(gml)); + if (value instanceof HashMap) { + HashMap map = (HashMap) value; + List geoms = new ArrayList(); + for (Object entry : map.values()) { + addToList(geoms, entry); + } + if( geoms.isEmpty() ){ + return null; + } else if( geoms.size()>1 ){ + GeometryFactory factory = geoms.get(0).getFactory(); + 
return factory.createMultiPolygon(geoms.toArray(new Polygon[0])); } else { - return new MultiPolygon[] { toMultiPolygon((Geometry) value) }; + return toMultiPolygon(geoms.get(0)); } - } catch (Exception e) { - return new MultiPolygon[0]; + + } else if (value == null) { + return null; + } else { + return toMultiPolygon((Geometry) value); } } - /** - * @see SpatialIndexWriter#extractGeometriesFrom(String, String, Element) - */ - private static void addToList(List geoms, Object entry) + public static void addToList(List geoms, Object entry) { if (entry instanceof Polygon) { geoms.add((Polygon) entry); @@ -335,16 +322,6 @@ } } - private static void delete(File parentFile) - { - if (parentFile.isDirectory()) { - for (File file : parentFile.listFiles()) { - delete(file); - } - } - parentFile.delete(); - } - private void populateIndex() throws IOException { _index = new STRtree(); @@ -362,63 +339,25 @@ } } - private SimpleFeatureType createSchema() throws SchemaException + private FeatureStore createFeatureStore(DataStore datastore) throws Exception { - SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); - AttributeDescriptor geomDescriptor = new AttributeTypeBuilder().crs( - DefaultGeographicCRS.WGS84).binding(MultiPolygon.class) - .buildDescriptor("the_geom"); - builder.setName(SPATIAL_INDEX_FILENAME); - builder.add(geomDescriptor); - builder.add(IDS_ATTRIBUTE_NAME, String.class); - return builder.buildFeatureType(); - } + DataStore ds = datastore; - public static File createDataStoreFile(String indexBasedir) - { - return new File(indexBasedir + "/" + SPATIAL_INDEX_FILENAME + ".shp"); - // return new File(indexBasedir + "/" + SPATIAL_INDEX_FILENAME + "/" - // + SPATIAL_INDEX_FILENAME + ".h2"); + return (FeatureStore) ds.getFeatureSource(SPATIAL_INDEX_TYPENAME); } - /** - * Create a Shapefile datastore in WGS84. - * - * @return - * @throws Exception - */ - private FeatureStore createFeatureStore() throws Exception + public static MultiPolygon toMultiPolygon(Geometry geometry) { - _file.getParentFile().mkdirs(); - - IndexedShapefileDataStore ds = new IndexedShapefileDataStore(_file - .toURI().toURL(), new URI("http://geonetwork.org"), false, - true, IndexType.NONE, Charset.defaultCharset()); - CoordinateReferenceSystem crs = CRS.decode("EPSG:4326"); - - if (crs != null) { - ds.forceSchemaCRS(crs); - } - - if (!_file.exists()) { - ds.createSchema(_schema); - } - - return (FeatureStore) ds.getFeatureSource(_schema.getTypeName()); - } - - private static MultiPolygon toMultiPolygon(Geometry geometry) - { - if (geometry instanceof Polygon) { + if (geometry instanceof Polygon) { Polygon polygon = (Polygon) geometry; - + return geometry.getFactory().createMultiPolygon( new Polygon[] { polygon }); }else if (geometry instanceof MultiPolygon) { return (MultiPolygon) geometry; } String message = geometry.getClass()+" cannot be converted to a polygon. 
Check Metadata"; - Log.error("SpatialIndexWriter", message); + Log.error(Geonet.INDEX_ENGINE, message); throw new IllegalArgumentException(message); } Index: src/org/fao/geonet/kernel/search/spatial/SpatialFilter.java =================================================================== --- src/org/fao/geonet/kernel/search/spatial/SpatialFilter.java (revision 5897) +++ src/org/fao/geonet/kernel/search/spatial/SpatialFilter.java (working copy) @@ -12,7 +12,8 @@ import jeeves.utils.Log; import org.apache.lucene.document.Document; -import org.apache.lucene.document.SetBasedFieldSelector; +import org.apache.lucene.document.FieldSelector; +import org.apache.lucene.document.FieldSelectorResult; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.Filter; import org.apache.lucene.search.HitCollector; @@ -53,7 +54,7 @@ SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); builder.add(SpatialIndexWriter.GEOM_ATTRIBUTE_NAME, Geometry.class,DefaultGeographicCRS.WGS84); builder.setDefaultGeometry(SpatialIndexWriter.GEOM_ATTRIBUTE_NAME); - builder.setName(SpatialIndexWriter.SPATIAL_INDEX_FILENAME); + builder.setName(SpatialIndexWriter.SPATIAL_INDEX_TYPENAME); FEATURE_TYPE = builder.buildFeatureType(); } @@ -64,7 +65,7 @@ protected final FilterFactory2 _filterFactory; protected final Query _query; - protected final SetBasedFieldSelector _selector; + protected final FieldSelector _selector; private org.opengis.filter.Filter _spatialFilter; private Map _unrefinedMatches; private boolean warned = false; @@ -79,10 +80,12 @@ _filterFactory = CommonFactoryFinder.getFilterFactory2(GeoTools .getDefaultHints()); - Set fieldsToLoad = new HashSet(); - fieldsToLoad.add("_id"); - Set lazyFieldsToLoad = Collections.emptySet(); - _selector = new SetBasedFieldSelector(fieldsToLoad, lazyFieldsToLoad); + _selector = new FieldSelector() { + public final FieldSelectorResult accept(String name) { + if (name.equals("_id")) return FieldSelectorResult.LOAD; + else return FieldSelectorResult.NO_LOAD; + } + }; } protected SpatialFilter(Query query, Envelope bounds, @@ -105,7 +108,7 @@ { Document document; try { - document = reader.document(doc); + document = reader.document(doc, _selector); String key = document.get("_id"); FeatureId featureId = unrefinedSpatialMatches.get(key); if (featureId!=null) { @@ -207,4 +210,4 @@ "createGeomFilter must be overridden if createFilter is not overridden"); } -} \ No newline at end of file +} Index: src/org/fao/geonet/kernel/mef/Importer.java =================================================================== --- src/org/fao/geonet/kernel/mef/Importer.java (revision 5897) +++ src/org/fao/geonet/kernel/mef/Importer.java (working copy) @@ -53,6 +53,12 @@ public static List doImport(final Element params, final ServiceContext context, File mefFile, final String stylePath) throws Exception { + return doImport(params, context, mefFile, stylePath, false); + } + + public static List doImport(final Element params, + final ServiceContext context, File mefFile, final String stylePath, + final boolean indexGroup) throws Exception { final GeonetContext gc = (GeonetContext) context .getHandlerContext(Geonet.CONTEXT_NAME); final DataManager dm = gc.getDataManager(); @@ -304,8 +310,8 @@ dbms.execute("UPDATE Metadata SET popularity=? 
WHERE id=?", new Integer(popularity), iId); - dm.setTemplate(dbms, iId, isTemplate, null); - dm.setHarvested(dbms, iId, null); + dm.setTemplateExt(dbms, iId, isTemplate, null); + dm.setHarvestedExt(dbms, iId, null); String pubDir = Lib.resource.getDir(context, "public", id .get(index)); @@ -323,7 +329,11 @@ else addOperations(dm, dbms, privileges, id.get(index), groupId); - dm.indexMetadata(dbms, id.get(index)); + if (indexGroup) { + dm.indexMetadataGroup(dbms, id.get(index)); + } else { + dm.indexMetadata(dbms, id.get(index)); + } } // -------------------------------------------------------------------- Index: src/org/fao/geonet/kernel/mef/MEFLib.java =================================================================== --- src/org/fao/geonet/kernel/mef/MEFLib.java (revision 5897) +++ src/org/fao/geonet/kernel/mef/MEFLib.java (working copy) @@ -148,6 +148,11 @@ V2 } + public static List doImportIndexGroup(Element params, ServiceContext context, File mefFile, String stylePath) throws Exception { + return Importer.doImport(params, context, mefFile, stylePath, true); + } + + // -------------------------------------------------------------------------- public static List doImport(Element params, ServiceContext context, File mefFile, String stylePath) throws Exception { Index: src/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java (working copy) @@ -151,7 +151,7 @@ if (!dataMan.existsSchema(ri.schema)) { log.debug(" - Metadata skipped due to unknown schema. uuid:"+ ri.uuid - +", schema:"+ ri.schema); + +", schema:"+ ri.schema); result.unknownSchema++; } else @@ -273,8 +273,8 @@ int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, isTemplate, null); - dataMan.setHarvested(dbms, iId, params.uuid); + dataMan.setTemplateExt(dbms, iId, isTemplate, null); + dataMan.setHarvestedExt(dbms, iId, params.uuid); if(!localRating) { String rating = general.getChildText("rating"); @@ -295,7 +295,7 @@ addPrivileges(id, info.getChild("privileges")); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.addedMetadata++; return id; @@ -553,7 +553,7 @@ addPrivileges(id, info.getChild("privileges")); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); } //-------------------------------------------------------------------------- Index: src/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java (working copy) @@ -100,38 +100,43 @@ //----------------------------------------------------------------------- //--- insert/update new metadata - for(Iterator i=mdList.iterator(); i.hasNext(); ) - { - Element info = ((Element) i.next()).getChild("info", Edit.NAMESPACE); + dataMan.startIndexGroup(); + try { + for(Iterator i=mdList.iterator(); i.hasNext(); ) + { + Element info = ((Element) i.next()).getChild("info", Edit.NAMESPACE); - String remoteId = info.getChildText("id"); - String remoteUuid= info.getChildText("uuid"); - String schema = info.getChildText("schema"); - String changeDate= info.getChildText("changeDate"); + String remoteId = info.getChildText("id"); + String remoteUuid= 
info.getChildText("uuid"); + String schema = info.getChildText("schema"); + String changeDate= info.getChildText("changeDate"); - this.result.totalMetadata++; + this.result.totalMetadata++; - log.debug("Obtained remote id="+ remoteId +", changeDate="+ changeDate); + log.debug("Obtained remote id="+ remoteId +", changeDate="+ changeDate); - if (!dataMan.existsSchema(schema)) - { - log.debug(" - Skipping unsupported schema : "+ schema); - this.result.schemaSkipped++; - } - else - { - String id = dataMan.getMetadataId(dbms, remoteUuid); + if (!dataMan.existsSchema(schema)) + { + log.debug(" - Skipping unsupported schema : "+ schema); + this.result.schemaSkipped++; + } + else + { + String id = dataMan.getMetadataId(dbms, remoteUuid); - if (id == null) id = addMetadata(siteId, info); + if (id == null) id = addMetadata(siteId, info); else updateMetadata(siteId, info, id); - dbms.commit(); + dbms.commit(); - //--- maybe the metadata was unretrievable + //--- maybe the metadata was unretrievable - if (id != null) - dataMan.indexMetadata(dbms, id); + if (id != null) + dataMan.indexMetadataGroup(dbms, id); + } } + } finally { + dataMan.endIndexGroup(); } log.info("End of alignment for site-id="+ siteId); Index: src/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java (working copy) @@ -152,7 +152,7 @@ String id = dataMan.getMetadataId(dbms, ri.uuid); if (id == null) addMetadata(ri); - else updateMetadata(ri, id); + else updateMetadata(ri, id); } log.info("End of alignment for : "+ params.name); @@ -190,14 +190,14 @@ int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, params.uuid); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, params.uuid); addPrivileges(id); addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.addedMetadata++; } @@ -291,7 +291,7 @@ addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.updatedMetadata++; } } Index: src/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java (working copy) @@ -183,6 +183,7 @@ List idsForHarvestingResult = new ArrayList(); //----------------------------------------------------------------------- //--- insert/update new metadata + for(String xmlFile : results) { result.total++; Element xml = null; @@ -205,9 +206,8 @@ result.unknownSchema++; } else { - String uuid = Common.retrieveUUID(xml, schema); - System.out.println("extracted uuid: " + uuid); - if(uuid == null) { + String uuid = dataMan.extractUUID(schema, xml); + if(uuid == null || uuid.equals("")) { result.badFormat++; } else { @@ -226,6 +226,7 @@ } } } + if(!params.nodelete) { // // delete locally existing metadata from the same source if they were @@ -255,7 +256,7 @@ addCategories(id, localCateg, dbms); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); } @@ -278,14 +279,14 @@ createDate, uuid, 1, null); int iId = 
Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, source); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, source); addPrivileges(id, localGroups, dbms); addCategories(id, localCateg, dbms); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); return id; } Index: src/org/fao/geonet/kernel/harvest/harvester/arcsde/ArcSDEHarvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/arcsde/ArcSDEHarvester.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/harvester/arcsde/ArcSDEHarvester.java (working copy) @@ -37,7 +37,6 @@ import org.fao.geonet.arcgis.ArcSDEMetadataAdapter; import org.fao.geonet.constants.Geonet; -import org.fao.geonet.kernel.harvest.Common; import org.fao.geonet.kernel.harvest.harvester.AbstractHarvester; import org.fao.geonet.kernel.harvest.harvester.AbstractParams; import org.fao.geonet.kernel.harvest.harvester.CategoryMapper; @@ -183,8 +182,9 @@ } // the xml is recognizable iso19139 format else { - String uuid = Common.retrieveUUID(iso19139, schema); - if(uuid == null) { + String uuid = dataMan.extractUUID(schema, iso19139); + if(uuid == null || uuid.equals("")) { + System.out.println("Skipping metadata due to failure extracting uuid (uuid null or empty)."); result.badFormat++; } else { @@ -232,7 +232,7 @@ addCategories(id, localCateg, dbms); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); } /** * Inserts a metadata into the database. Lucene index is updated after insertion. @@ -253,14 +253,14 @@ createDate, uuid, 1, null); int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, source); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, source); addPrivileges(id, localGroups, dbms); addCategories(id, localCateg, dbms); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); return id; } @@ -358,4 +358,4 @@ public int badFormat; public int doesNotValidate; } -} \ No newline at end of file +} Index: src/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/harvester/AbstractHarvester.java (working copy) @@ -34,6 +34,7 @@ import jeeves.utils.Log; import org.fao.geonet.constants.Geonet; import org.fao.geonet.kernel.DataManager; +import org.fao.geonet.kernel.MetadataIndexerProcessor; import org.fao.geonet.kernel.harvest.Common.OperResult; import org.fao.geonet.kernel.harvest.Common.Status; import org.fao.geonet.kernel.harvest.harvester.arcsde.ArcSDEHarvester; @@ -354,6 +355,23 @@ //--- //--------------------------------------------------------------------------- + // Nested class to handle harvesting with fast indexing + public class HarvestWithIndexProcessor extends MetadataIndexerProcessor { + ResourceManager rm; + Logger logger; + + public HarvestWithIndexProcessor(DataManager dm, Logger logger, ResourceManager rm) { + super(dm); + this.logger = logger; + this.rm = rm; + } + + @Override + public void process() throws Exception { + doHarvest(logger, rm); + } + } + void harvest() { ResourceManager rm = new ResourceManager(context.getProviderManager()); @@ -376,7 +394,8 @@ //--- proper harvesting 
logger.info("Started harvesting from node : "+ nodeName); - doHarvest(logger, rm); + HarvestWithIndexProcessor h = new HarvestWithIndexProcessor(dataMan, logger, rm); + h.processWithFastIndexing(); logger.info("Ended harvesting from node : "+ nodeName); if (getParams().oneRunOnly) Index: src/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java (working copy) @@ -108,6 +108,7 @@ } //----------------------------------------------------------------------- //--- insert/update new metadata + for(RemoteFile rf : files) { result.total++; String id = localUris.getID(rf.getPath()); @@ -158,14 +159,14 @@ int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, params.uuid, rf.getPath()); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, params.uuid, rf.getPath()); addPrivileges(id); addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.added++; } @@ -285,7 +286,7 @@ dbms.execute("DELETE FROM MetadataCateg WHERE metadataId=?", Integer.parseInt(id)); addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.updated++; } } @@ -324,4 +325,4 @@ public boolean isMoreRecentThan(String localDate); } -//============================================================================= \ No newline at end of file +//============================================================================= Index: src/org/fao/geonet/kernel/harvest/harvester/ogcwxs/Harvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/ogcwxs/Harvester.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/harvester/ogcwxs/Harvester.java (working copy) @@ -298,15 +298,15 @@ List layers = xp.selectNodes(capa); log.info(" - Number of layers, featureTypes or Coverages found : " + layers.size()); - + for (Element layer : layers) { WxSLayerRegistry s = addLayerMetadata (layer, capa); if (s != null) layersRegistry.add(s); } - // Update ISO19119 for data/service links creation (ie. operatesOn element) - // The editor will support that but it will make quite heavy XML. + // Update ISO19119 for data/service links creation (ie. operatesOn element) + // The editor will support that but it will make quite heavy XML. md = addOperatesOnUuid (md, layersRegistry); } @@ -325,11 +325,11 @@ addPrivileges(id); addCategories(id); + dataMan.setHarvestedExt(dbms, iId, params.uuid, params.url); dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, params.uuid, params.url); dbms.commit(); - //dataMan.indexMetadata(dbms, id); setHarvested update the index + //dataMan.indexMetadata(dbms, id); setTemplate update the index result.added ++; @@ -595,11 +595,11 @@ dataMan.setCategory (dbms, reg.id, params.datasetCategory); log.debug(" - Set Harvested."); - dataMan.setHarvested(dbms, iId, params.uuid, params.url); // FIXME : harvestUuid should be a MD5 string + dataMan.setHarvestedExt(dbms, iId, params.uuid, params.url); // FIXME : harvestUuid should be a MD5 string dbms.commit(); - //dataMan.indexMetadata(dbms, reg.id); setHarvested update the index + dataMan.indexMetadataGroup(dbms, reg.id); try { // Load bbox info for later use (eg. 
WMS thumbnails creation) Index: src/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java (working copy) @@ -210,7 +210,7 @@ String id = localUuids.getID(ri.id); if (id == null) addMetadata(t, ri); - else updateMetadata(t, ri, id); + else updateMetadata(t, ri, id); } log.info("End of alignment for : "+ params.name); @@ -252,14 +252,14 @@ int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, params.uuid); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, params.uuid); addPrivileges(id); addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.added++; } @@ -453,7 +453,7 @@ addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.updated++; } } Index: src/org/fao/geonet/kernel/harvest/Common.java =================================================================== --- src/org/fao/geonet/kernel/harvest/Common.java (revision 5897) +++ src/org/fao/geonet/kernel/harvest/Common.java (working copy) @@ -31,27 +31,6 @@ //============================================================================= public class Common { - /** - * Examines an XML element and returns its UUID if it is in a recognized schema format. - * - * @param xml - * @return - */ - public static String retrieveUUID(Element xml, String schema) { - if(schema.equals("iso19139")) { - Element fileIdentifier = xml.getChild("fileIdentifier", Namespace.getNamespace("http://www.isotc211.org/2005/gmd")); - if(fileIdentifier == null) { - return null; - } - else { - return fileIdentifier.getChildText("CharacterString", Namespace.getNamespace("http://www.isotc211.org/2005/gco")); - } - } - // no other schemas supported for now - else { - return null; - } - } //--------------------------------------------------------------------------- //--- Status Index: src/org/fao/geonet/kernel/setting/SettingInfo.java =================================================================== --- src/org/fao/geonet/kernel/setting/SettingInfo.java (revision 5897) +++ src/org/fao/geonet/kernel/setting/SettingInfo.java (working copy) @@ -81,6 +81,19 @@ //--------------------------------------------------------------------------- + public String getSelectionMaxRecords() + { + return sm.getValue("system/selectionmanager/maxrecords"); + } + + //--------------------------------------------------------------------------- + + public String getLuceneIndexOptimizerSchedulerInterval() + { + return sm.getValue("system/indexoptimizer/interval"); + } + + //--------------------------------------------------------------------------- public String getFeedbackEmail() { return sm.getValue("system/feedback/email"); Index: src/org/fao/geonet/services/metadata/MassiveNewOwner.java =================================================================== --- src/org/fao/geonet/services/metadata/MassiveNewOwner.java (revision 5897) +++ src/org/fao/geonet/services/metadata/MassiveNewOwner.java (working copy) @@ -130,9 +130,9 @@ dbms.commit(); // -- reindex metadata - for (int mdId : metadata) { - dm.indexMetadata(dbms, Integer.toString(mdId)); - } + context.info("Re-indexing metadata"); + MassiveOpsMetadataReindexer r = new MassiveOpsMetadataReindexer(dm, dbms, 
metadata); + r.processWithFastIndexing(); // -- for the moment just return the sizes - we could return the ids // -- at a later stage for some sort of result display Index: src/org/fao/geonet/services/metadata/MassiveOpsMetadataReindexer.java =================================================================== --- src/org/fao/geonet/services/metadata/MassiveOpsMetadataReindexer.java (revision 0) +++ src/org/fao/geonet/services/metadata/MassiveOpsMetadataReindexer.java (revision 0) @@ -0,0 +1,48 @@ +//============================================================================== +//=== This program is free software; you can redistribute it and/or modify +//=== it under the terms of the GNU General Public License as published by +//=== the Free Software Foundation; either version 2 of the License, or (at +//=== your option) any later version. +//=== +//=== This program is distributed in the hope that it will be useful, but +//=== WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== General Public License for more details. +//=== +//=== You should have received a copy of the GNU General Public License +//=== along with this program; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA +//=== +//=== Contact: Jeroen Ticheler email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.services.metadata; + +import java.util.Set; + +import jeeves.resources.dbms.Dbms; + +import org.fao.geonet.kernel.DataManager; +import org.fao.geonet.kernel.MetadataIndexerProcessor; + +/** + * Class that extends MetadataIndexerProcessor to reindex the metadata + * changed in any of the Massive operation services + */ +public class MassiveOpsMetadataReindexer extends MetadataIndexerProcessor { + Set metadata; + Dbms dbms; + + public MassiveOpsMetadataReindexer(DataManager dm, Dbms dbms, Set metadata) { + super(dm); + this.dbms = dbms; + this.metadata = metadata; + } + + @Override + public void process() throws Exception { + for (int mdId : metadata) { + dm.indexMetadataGroup(dbms, Integer.toString(mdId)); + } + } +} Index: src/org/fao/geonet/services/metadata/XslProcessing.java =================================================================== --- src/org/fao/geonet/services/metadata/XslProcessing.java (revision 5897) +++ src/org/fao/geonet/services/metadata/XslProcessing.java (working copy) @@ -49,6 +49,7 @@ import org.fao.geonet.kernel.MdInfo; import org.fao.geonet.kernel.SelectionManager; import org.fao.geonet.services.Utils; +import org.fao.geonet.util.ISODate; import org.jdom.Element; //============================================================================= @@ -95,7 +96,7 @@ try { Element processedMetadata = process(id, process, _appPath, params, - context, metadata, notFound, notOwner, notProcessFound); + context, metadata, notFound, notOwner, notProcessFound, false); if (processedMetadata == null) { throw new BadParameterEx("Processing failed", "Not found:" + notFound.size() + @@ -131,7 +132,7 @@ public static Element process(String id, String process, String appPath, Element params, ServiceContext context, Set metadata, Set notFound, Set notOwner, - Set notProcessFound) throws Exception { + Set notProcessFound, boolean useIndexGroup) throws Exception { GeonetContext gc = (GeonetContext) context .getHandlerContext(Geonet.CONTEXT_NAME); DataManager dataMan = gc.getDataManager(); @@ 
-173,9 +174,14 @@ xslParameter); // --- save metadata and return status - dataMan.updateMetadata(context.getUserSession(), dbms, id, - processedMetadata, false, null, context.getLanguage()); + dataMan.updateMetadataExt(dbms, id, processedMetadata, new ISODate().toString()); + if (useIndexGroup) { + dataMan.indexMetadataGroup(dbms, id); + } else { + dataMan.indexMetadata(dbms, id); + } + metadata.add(new Integer(id)); return processedMetadata; Index: src/org/fao/geonet/services/metadata/MassiveUpdatePrivileges.java =================================================================== --- src/org/fao/geonet/services/metadata/MassiveUpdatePrivileges.java (revision 5897) +++ src/org/fao/geonet/services/metadata/MassiveUpdatePrivileges.java (working copy) @@ -131,9 +131,9 @@ } //--- reindex metadata - for (int mdId : metadata) { - dm.indexMetadata(dbms, Integer.toString(mdId)); - } + context.info("Re-indexing metadata"); + MassiveOpsMetadataReindexer r = new MassiveOpsMetadataReindexer(dm, dbms, metadata); + r.processWithFastIndexing(); // -- for the moment just return the sizes - we could return the ids // -- at a later stage for some sort of result display Index: src/org/fao/geonet/services/metadata/MassiveUpdateCategories.java =================================================================== --- src/org/fao/geonet/services/metadata/MassiveUpdateCategories.java (revision 5897) +++ src/org/fao/geonet/services/metadata/MassiveUpdateCategories.java (working copy) @@ -116,10 +116,10 @@ dbms.commit(); //--- reindex metadata - for (int mdId : metadata) { - dm.indexMetadata(dbms, Integer.toString(mdId)); - } - + context.info("Re-indexing metadata"); + MassiveOpsMetadataReindexer r = new MassiveOpsMetadataReindexer(dm, dbms, metadata); + r.processWithFastIndexing(); + // -- for the moment just return the sizes - we could return the ids // -- at a later stage for some sort of result display return new Element(Jeeves.Elem.RESPONSE) Index: src/org/fao/geonet/services/metadata/MassiveXslProcessing.java =================================================================== --- src/org/fao/geonet/services/metadata/MassiveXslProcessing.java (revision 5897) +++ src/org/fao/geonet/services/metadata/MassiveXslProcessing.java (working copy) @@ -46,6 +46,7 @@ import org.fao.geonet.kernel.AccessManager; import org.fao.geonet.kernel.DataManager; import org.fao.geonet.kernel.MdInfo; +import org.fao.geonet.kernel.MetadataIndexerProcessor; import org.fao.geonet.kernel.SelectionManager; import org.fao.geonet.services.Utils; import org.jdom.Element; @@ -109,14 +110,8 @@ SelectionManager sm = SelectionManager.getManager(session); synchronized(sm.getSelection("metadata")) { - for (Iterator iter = sm.getSelection("metadata").iterator(); iter - .hasNext();) { - String uuid = (String) iter.next(); - String id = dataMan.getMetadataId(dbms, uuid); - context.info("Processing metadata with id:" + id); - - XslProcessing.process(id, process, _appPath, params, context, metadata, notFound, notOwner, notProcessFound); - } + MassiveXslMetadataReindexer m = new MassiveXslMetadataReindexer(dataMan, dbms, sm.getSelection("metadata").iterator(), process, _appPath, params, context, metadata, notFound, notOwner, notProcessFound); + m.processWithFastIndexing(); } @@ -136,6 +131,41 @@ // --- Private methods // --- // -------------------------------------------------------------------------- + + class MassiveXslMetadataReindexer extends MetadataIndexerProcessor { + Dbms dbms; + Iterator iter; + String process; + String appPath; + Element params; 
+ ServiceContext context; + Set metadata, notFound, notOwner, notProcessFound; + + public MassiveXslMetadataReindexer(DataManager dm, Dbms dbms, Iterator iter, String process, String appPath, Element params, ServiceContext context, Set metadata, Set notFound, Set notOwner, Set notProcessFound) { + super(dm); + this.dbms = dbms; + this.iter = iter; + this.process = process; + this.appPath = appPath; + this.params = params; + this.context = context; + this.metadata = metadata; + this.notFound = notFound; + this.notOwner = notOwner; + this.notProcessFound = notProcessFound; + } + + @Override + public void process() throws Exception { + while (iter.hasNext()) { + String uuid = (String) iter.next(); + String id = dm.getMetadataId(dbms, uuid); + context.info("Processing metadata with id:" + id); + + XslProcessing.process(id, process, appPath, params, context, metadata, notFound, notOwner, notProcessFound, true); + } + } + } } // ============================================================================= Index: src/org/fao/geonet/services/metadata/ImportFromDir.java =================================================================== --- src/org/fao/geonet/services/metadata/ImportFromDir.java (revision 5897) +++ src/org/fao/geonet/services/metadata/ImportFromDir.java (working copy) @@ -42,6 +42,7 @@ import org.fao.geonet.constants.Params; import org.fao.geonet.exceptions.SchematronValidationErrorEx; import org.fao.geonet.kernel.DataManager; +import org.fao.geonet.kernel.MetadataIndexerProcessor; import org.fao.geonet.kernel.mef.MEFLib; import org.jdom.Element; @@ -146,12 +147,42 @@ //-------------------------------------------------------------------------- //--- + //--- ImportMetadataReindexer class (used in standardImport) + //--- + //-------------------------------------------------------------------------- + + public class ImportMetadataReindexer extends MetadataIndexerProcessor { + Element params; + File files[]; + String stylePath; + ServiceContext context; + + public ImportMetadataReindexer(DataManager dm, Element params, ServiceContext context, File files[], String stylePath) { + super (dm); + this.params = params; + this.context = context; + this.files = files; + this.stylePath = stylePath; + } + + public void process() throws Exception { + for(int i=0; i reindex = dbms.select(query).getChildren(); dbms.execute("DELETE FROM OperationAllowed WHERE groupId="+ id); dbms.execute("DELETE FROM UserGroups WHERE groupId="+ id); @@ -71,14 +76,9 @@ GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); DataManager dm = gc.getDataManager(); - for (Object o : reindex) - { - Element md = (Element) o; - String mdId = md.getChildText("metadataid"); + ServiceMetadataReindexer s = new ServiceMetadataReindexer(dm, dbms, reindex); + s.processWithFastIndexing(); - dm.indexMetadata(dbms, mdId); - } - return new Element(Jeeves.Elem.RESPONSE) .addContent(new Element(Jeeves.Elem.OPERATION).setText(Jeeves.Text.REMOVED)); } Index: src/org/fao/geonet/services/config/Set.java =================================================================== --- src/org/fao/geonet/services/config/Set.java (revision 5897) +++ src/org/fao/geonet/services/config/Set.java (working copy) @@ -37,6 +37,8 @@ import org.fao.geonet.GeonetContext; import org.fao.geonet.constants.Geonet; +import org.fao.geonet.kernel.DataManager; +import org.fao.geonet.kernel.setting.SettingInfo; import org.fao.geonet.kernel.setting.SettingManager; import org.fao.geonet.lib.Lib; import org.jdom.Element; @@ -74,11 +76,37 @@ if 
(!sm.setValues(dbms, values)) throw new OperationAbortedEx("Cannot set all values"); + doActions(context, values); + return new Element(Jeeves.Elem.RESPONSE).setText("ok"); } //-------------------------------------------------------------------------- //--- + //--- doActions - do any immediate actions resulting from changes to settings + //--- + //-------------------------------------------------------------------------- + + // do any immediate actions resulting from changes to settings + private void doActions(ServiceContext context, Map values) throws Exception { + GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); + DataManager dataMan = gc.getDataManager(); + + String sMin = (String)values.get("system/indexoptimizer/interval"); + // reset the lucene index optimizer to run at the scheduled interval + try { + int minutes = Integer.parseInt(sMin); + dataMan.rescheduleOptimizer(minutes); + } catch (Exception e) { + e.printStackTrace(); + throw new OperationAbortedEx("Cannot restart Lucene Optimizer with interval "+sMin+": "+e.getMessage()); + } + + // should also restart the Z server? + } + + //-------------------------------------------------------------------------- + //--- //--- Vars //--- //-------------------------------------------------------------------------- @@ -94,6 +122,10 @@ new ConfigEntry(ConfigEntry.Type.STRING, true, "intranet/network", "system/intranet/network"), new ConfigEntry(ConfigEntry.Type.STRING, true, "intranet/netmask", "system/intranet/netmask"), + new ConfigEntry(ConfigEntry.Type.INT, true, "selectionmanager/maxrecords", "system/selectionmanager/maxrecords"), + + new ConfigEntry(ConfigEntry.Type.INT, true, "indexoptimizer/interval", "system/indexoptimizer/interval"), + new ConfigEntry(ConfigEntry.Type.BOOL, true, "z3950/enable", "system/z3950/enable"), new ConfigEntry(ConfigEntry.Type.INT, false, "z3950/port", "system/z3950/port"), Index: src/org/fao/geonet/services/category/Remove.java =================================================================== --- src/org/fao/geonet/services/category/Remove.java (revision 5897) +++ src/org/fao/geonet/services/category/Remove.java (working copy) @@ -23,16 +23,21 @@ package org.fao.geonet.services.category; +import java.util.List; + import jeeves.constants.Jeeves; import jeeves.interfaces.Service; import jeeves.resources.dbms.Dbms; import jeeves.server.ServiceConfig; import jeeves.server.context.ServiceContext; import jeeves.utils.Util; + import org.fao.geonet.GeonetContext; import org.fao.geonet.constants.Geonet; import org.fao.geonet.constants.Params; import org.fao.geonet.kernel.DataManager; +import org.fao.geonet.services.util.ServiceMetadataReindexer; + import org.jdom.Element; //============================================================================= @@ -58,7 +63,7 @@ String query = "SELECT metadataId FROM MetadataCateg WHERE categoryId="+id; - java.util.List reindex = dbms.select(query).getChildren(); + List reindex = dbms.select(query).getChildren(); dbms.execute ("DELETE FROM MetadataCateg WHERE categoryId=" + id); dbms.execute ("DELETE FROM CategoriesDes WHERE idDes=" + id); @@ -69,14 +74,9 @@ GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); DataManager dm = gc.getDataManager(); - for (Object o : reindex) - { - Element md = (Element) o; - String mdId = md.getChildText("metadataid"); + ServiceMetadataReindexer s = new ServiceMetadataReindexer(dm, dbms, reindex); + s.processWithFastIndexing(); - dm.indexMetadata(dbms, mdId); - } - return new 
Element(Jeeves.Elem.RESPONSE) .addContent(new Element(Jeeves.Elem.OPERATION).setText(Jeeves.Text.REMOVED)); } Index: src/org/fao/geonet/services/util/ServiceMetadataReindexer.java =================================================================== --- src/org/fao/geonet/services/util/ServiceMetadataReindexer.java (revision 0) +++ src/org/fao/geonet/services/util/ServiceMetadataReindexer.java (revision 0) @@ -0,0 +1,52 @@ +//============================================================================== +//=== This program is free software; you can redistribute it and/or modify +//=== it under the terms of the GNU General Public License as published by +//=== the Free Software Foundation; either version 2 of the License, or (at +//=== your option) any later version. +//=== +//=== This program is distributed in the hope that it will be useful, but +//=== WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== General Public License for more details. +//=== +//=== You should have received a copy of the GNU General Public License +//=== along with this program; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA +//=== +//=== Contact: Jeroen Ticheler email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.geonet.services.util; + +import java.util.List; + +import jeeves.resources.dbms.Dbms; + +import org.fao.geonet.kernel.DataManager; +import org.fao.geonet.kernel.MetadataIndexerProcessor; + +import org.jdom.Element; + +/** + * Class that extends MetadataIndexerProcessor to reindex the metadata + * changed in any service that processes a list of metadata documents (as JDOM + * Elements) + */ +public class ServiceMetadataReindexer extends MetadataIndexerProcessor { + List reindex; + Dbms dbms; + + public ServiceMetadataReindexer(DataManager dm, Dbms dbms, List reindex) { + super(dm); + this.dbms = dbms; + this.reindex = reindex; + } + + @Override + public void process() throws Exception { + for (Element md : reindex) { + String mdId = md.getChildText("metadataid"); + dm.indexMetadataGroup(dbms, mdId); + } + } +} Index: src/org/fao/geonet/Geonetwork.java =================================================================== --- src/org/fao/geonet/Geonetwork.java (revision 5897) +++ src/org/fao/geonet/Geonetwork.java (working copy) @@ -24,6 +24,12 @@ package org.fao.geonet; import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.nio.charset.Charset; +import java.util.Map; +import java.util.HashMap; + import jeeves.interfaces.ApplicationHandler; import jeeves.interfaces.Logger; import jeeves.resources.dbms.Dbms; @@ -44,8 +50,27 @@ import org.fao.geonet.kernel.search.SearchManager; import org.fao.geonet.kernel.setting.SettingManager; import org.fao.geonet.services.util.z3950.Server; + +import org.geotools.data.DataStore; +//import org.geotools.data.oracle.OracleDataStoreFactory; Not until geotools 2.6.1 +import org.geotools.data.postgis.PostgisDataStoreFactory; +import org.geotools.data.shapefile.indexed.IndexType; +import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; +import org.geotools.feature.AttributeTypeBuilder; +import org.geotools.feature.simple.SimpleFeatureBuilder; +import org.geotools.feature.simple.SimpleFeatureTypeBuilder; +import org.geotools.referencing.CRS; +import org.geotools.referencing.crs.DefaultGeographicCRS; + +import 
org.opengis.feature.simple.SimpleFeature; +import org.opengis.feature.simple.SimpleFeatureType; +import org.opengis.feature.type.AttributeDescriptor; +import org.opengis.referencing.crs.CoordinateReferenceSystem; + import org.jdom.Element; +import com.vividsolutions.jts.geom.MultiPolygon; + //============================================================================= /** This is the main class. It handles http connections and inits the system @@ -56,6 +81,8 @@ private Logger logger; private SearchManager searchMan; private ThesaurusManager thesaurusMan; + private String SPATIAL_INDEX_FILENAME = "spatialindex"; + static final String IDS_ATTRIBUTE_NAME = "id"; //--------------------------------------------------------------------------- //--- @@ -132,12 +159,15 @@ logger.info(" - Search..."); - String luceneDir = handlerConfig.getMandatoryValue(Geonet.Config.LUCENE_DIR); + String luceneDir = path + handlerConfig.getMandatoryValue(Geonet.Config.LUCENE_DIR); String summaryConfigXmlFile = handlerConfig.getMandatoryValue(Geonet.Config.SUMMARY_CONFIG); - - searchMan = new SearchManager(path, luceneDir, summaryConfigXmlFile); + DataStore dataStore = createDataStore(context.getResourceManager().getProps(Geonet.Res.MAIN_DB), luceneDir); + + String optimizerInterval = settingMan.getValue("system/indexoptimizer/interval"); + searchMan = new SearchManager(path, luceneDir, summaryConfigXmlFile, dataStore, optimizerInterval); + //------------------------------------------------------------------------ //--- extract intranet ip/mask and initialize AccessManager @@ -252,6 +282,142 @@ logger.info(" - Z39.50..."); Server.end(); } + + //--------------------------------------------------------------------------- + + private DataStore createDataStore(Map props, String luceneDir) throws Exception { + String url = props.get("url"); + String user = props.get("user"); + String passwd = props.get("password"); + + DataStore ds = null; + try { + if (url.contains("postgis")) { + ds = createPostgisDatastore(user, passwd, url); + } else if (url.contains("oracle")) { + ds = createOracleDatastore(user, passwd, url); + } + } catch (Exception e) { + logger.error("Failed to create datastore for "+url+". 
Will use shapefile instead."); + logger.error(e.getMessage()); + e.printStackTrace(); + } + + if (ds != null) return ds; + else return createShapefileDatastore(luceneDir); + } + + //--------------------------------------------------------------------------- + + private DataStore createPostgisDatastore(String user, String passwd, String url) throws Exception { + + String[] values = url.split("/"); + + Map params = new HashMap(); + params.put(PostgisDataStoreFactory.DBTYPE.key, PostgisDataStoreFactory.DBTYPE.sample); + params.put(PostgisDataStoreFactory.DATABASE.key, getDatabase(url, values)); + params.put(PostgisDataStoreFactory.USER.key, user); + params.put(PostgisDataStoreFactory.PASSWD.key, passwd); + params.put(PostgisDataStoreFactory.HOST.key, getHost(url, values)); + params.put(PostgisDataStoreFactory.PORT.key, getPort(url, values)); + //logger.info("Connecting using "+params); - don't show unless we need it + + PostgisDataStoreFactory factory = new PostgisDataStoreFactory(); + DataStore ds = factory.createDataStore(params); + logger.info("NOTE: Using POSTGIS for spatial index"); + + return ds; + } + + //--------------------------------------------------------------------------- + + private DataStore createOracleDatastore(String user, String passwd, String url) throws Exception { + + String[] values = url.split(":"); +/* + Map params = new HashMap(); + params.put(OracleDataStoreFactory.DBTYPE.key, OracleDataStoreFactory.DBTYPE.sample); + params.put(OracleDataStoreFactory.DATABASE.key, getDatabase(url, values)); + params.put(OracleDataStoreFactory.USER.key, user); + params.put(OracleDataStoreFactory.PASSWD.key, passwd); + params.put(OracleDataStoreFactory.HOST.key, getHost(url, values)); + params.put(OracleDataStoreFactory.PORT.key, getPort(url, values)); + + OracleDataStoreFactory factory = new OracleDataStoreFactory(); + DataStore ds = factory.createDataStore(params); + + return ds; +*/ + return null; + } + + //--------------------------------------------------------------------------- + + private DataStore createShapefileDatastore(String luceneDir) throws Exception { + File file = new File(luceneDir + "/spatial/" + SPATIAL_INDEX_FILENAME + ".shp"); + file.getParentFile().mkdirs(); + if (!file.exists()) { + logger.info("Creating shapefile "+file.getAbsolutePath()); + } else { + logger.info("Using shapefile "+file.getAbsolutePath()); + } + IndexedShapefileDataStore ds = new IndexedShapefileDataStore(file.toURI().toURL(), new URI("http://geonetwork.org"), true, true, IndexType.QIX, Charset.defaultCharset()); + CoordinateReferenceSystem crs = CRS.decode("EPSG:4326"); + + if (crs != null) { + ds.forceSchemaCRS(crs); + } + + if (!file.exists()) { + SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); + AttributeDescriptor geomDescriptor = new AttributeTypeBuilder().crs(DefaultGeographicCRS.WGS84).binding(MultiPolygon.class).buildDescriptor("the_geom"); + builder.setName(SPATIAL_INDEX_FILENAME); + builder.add(geomDescriptor); + builder.add(IDS_ATTRIBUTE_NAME, String.class); + ds.createSchema(builder.buildFeatureType()); + } + + logger.info("NOTE: Using shapefile for spatial index, this can be slow for larger catalogs"); + return ds; + } + + //--------------------------------------------------------------------------- + + private String getDatabase(String url, String[] values) throws Exception { + if (url.contains("postgis")) { + return values[3]; + } else if (url.contains("oracle")) { + return values[5]; + } else { + throw new Exception("Unknown database in url "+url); + } + 
} + + //--------------------------------------------------------------------------- + + private String getHost(String url, String[] values) throws Exception { + if (url.contains("postgis")) { + String value = values[2]; + return value.substring(0,value.indexOf(":")); + } else if (url.contains("oracle")) { + return values[3]; + } else { + throw new Exception("Unknown database in url "+url); + } + } + + //--------------------------------------------------------------------------- + + private String getPort(String url, String values[]) throws Exception { + if (url.contains("postgis")) { + String value = values[2]; + return value.substring(value.indexOf(":")+1); + } else if (url.contains("oracle")) { + return values[4]; + } else { + throw new Exception("Unknown database in url "+url); + } + } } //============================================================================= Index: src/org/fao/gast/gui/panels/config/dbms/MainPanel.java =================================================================== --- src/org/fao/gast/gui/panels/config/dbms/MainPanel.java (revision 5897) +++ src/org/fao/gast/gui/panels/config/dbms/MainPanel.java (working copy) @@ -151,6 +151,7 @@ new OraclePanel(), new MySQLPanel(), new PostgresPanel(), + new PostgisPanel(), new GenericPanel() //--- this must be the last one }; Index: src/org/fao/gast/gui/panels/config/dbms/PostgisPanel.java =================================================================== --- src/org/fao/gast/gui/panels/config/dbms/PostgisPanel.java (revision 0) +++ src/org/fao/gast/gui/panels/config/dbms/PostgisPanel.java (revision 0) @@ -0,0 +1,172 @@ +//============================================================================== +//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This program is free software; you can redistribute it and/or modify +//=== it under the terms of the GNU General Public License as published by +//=== the Free Software Foundation; either version 2 of the License, or (at +//=== your option) any later version. +//=== +//=== This program is distributed in the hope that it will be useful, but +//=== WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== General Public License for more details. +//=== +//=== You should have received a copy of the GNU General Public License +//=== along with this program; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. 
email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.gast.gui.panels.config.dbms; + +import java.util.StringTokenizer; +import javax.swing.JLabel; +import javax.swing.JTextField; +import org.dlib.gui.FlexLayout; +import org.fao.gast.lib.Lib; +import org.fao.gast.localization.Messages; + +//============================================================================== + +public class PostgisPanel extends DbmsPanel +{ + //--------------------------------------------------------------------------- + //--- + //--- Constructor + //--- + //--------------------------------------------------------------------------- + + /** + * + */ + private static final long serialVersionUID = -9039785892791080773L; + + public PostgisPanel() + { + FlexLayout fl = new FlexLayout(3,5); + fl.setColProp(1, FlexLayout.EXPAND); + setLayout(fl); + + add("0,0", new JLabel(Messages.getString("server"))); + add("0,1", new JLabel(Messages.getString("port"))); + add("0,2", new JLabel(Messages.getString("database"))); + add("0,3", new JLabel(Messages.getString("username"))); + add("0,4", new JLabel(Messages.getString("password"))); + + add("1,0", txtServer); + add("1,1", txtPort); + add("1,2", txtDatabase); + add("1,3", txtUser); + add("1,4", txtPass); + + add("2,2", new JLabel("(REQ)")); + add("2,3", new JLabel("(REQ)")); + add("2,4", new JLabel("(REQ)")); + + txtPort.setToolTipText(Messages.getString("postgres.defaultPort")); + } + + //--------------------------------------------------------------------------- + //--- + //--- DbmsPanel methods + //--- + //--------------------------------------------------------------------------- + + public String getLabel() { return "PostGIS"; } + + //--------------------------------------------------------------------------- + + public boolean matches(String url) + { + return url.startsWith(PREFIX); + } + + //--------------------------------------------------------------------------- + //--- jdbc:postgresql::/ + //--- jdbc:postgresql: + + public void retrieve() + { + String url = Lib.config.getDbmsURL(); + + //--- cut prefix + url = url.substring(PREFIX.length()); + + String server = ""; + String port = ""; + String database = url; + + if (url.startsWith("//") && url.length() > 2) + { + StringTokenizer st = new StringTokenizer(url.substring(2), "/"); + + server = st.nextToken(); + database = st.hasMoreTokens() ? 
st.nextToken() : ""; + + int pos = server.indexOf(":"); + + if (pos != -1) + { + port = server.substring(pos+1); + server = server.substring(0, pos); + } + } + + txtServer .setText(server); + txtPort .setText(port); + txtDatabase.setText(database); + txtUser .setText(Lib.config.getDbmsUser()); + txtPass .setText(Lib.config.getDbmsPassword()); + } + + //--------------------------------------------------------------------------- + + public void save() throws Exception + { + String server = txtServer .getText(); + String port = txtPort .getText(); + String database= txtDatabase.getText(); + + if (database.equals("")) + throw new Exception(Messages.getString("databaseNotEmpty")); + + if (!server.equals("") && !port.equals("") && !Lib.type.isInteger(port)) + throw new Exception(Messages.getString("portInt")); + + + if (server.equals("")) server="localhost"; + if (port.equals("")) port="5432"; + + String url = PREFIX +"//"+ server +":"+ port +"/"+ database; + + Lib.config.setDbmsDriver ("org.postgresql.Driver"); + Lib.config.setDbmsURL (url); + Lib.config.setDbmsUser (txtUser.getText()); + Lib.config.setDbmsPassword(txtPass.getText()); + Lib.config.removeActivator(); + Lib.config.save(); + } + + //--------------------------------------------------------------------------- + //--- + //--- Variables + //--- + //--------------------------------------------------------------------------- + + private JTextField txtServer = new JTextField(15); + private JTextField txtPort = new JTextField(6); + private JTextField txtDatabase= new JTextField(12); + private JTextField txtUser = new JTextField(12); + private JTextField txtPass = new JTextField(12); + + //--------------------------------------------------------------------------- + + private static final String PREFIX = "jdbc:postgis:"; +} + +//============================================================================== + Index: src/org/fao/gast/lib/DatabaseLib.java =================================================================== --- src/org/fao/gast/lib/DatabaseLib.java (revision 5897) +++ src/org/fao/gast/lib/DatabaseLib.java (working copy) @@ -413,7 +413,12 @@ if (cb != null) cb.creating(getObjectName(sql), getObjectType(sql)); - dbms.execute(sql); + Lib.log.info("Executing "+sql); + if (sql.trim().startsWith("SELECT")) { + dbms.select(sql); + } else { + dbms.execute(sql); + } sb = new StringBuffer(); } } @@ -483,6 +488,10 @@ else if (url.indexOf("postgresql") != -1) file = "create-db-postgres.sql"; + else if (url.indexOf("postgis") != -1) + file = "create-db-postgis.sql"; + + //--- load the dbms schema return Lib.text.load(appPath +SETUP_DIR+ "/sql/"+ file); Index: docs/Manual_fra.pdf =================================================================== Cannot display: file marked as a binary type. 
svn:mime-type = application/octet-stream Index: gast/setup/sql/create-db-postgis.sql =================================================================== --- gast/setup/sql/create-db-postgis.sql (revision 0) +++ gast/setup/sql/create-db-postgis.sql (revision 0) @@ -0,0 +1,308 @@ +-- ====================================================================== +-- === Sql Script for Database : Geonet +-- === +-- === Build : 153 +-- ====================================================================== + +CREATE TABLE Relations + ( + id int, + relatedId int, + + primary key(id,relatedId) + ); + +-- ====================================================================== + +CREATE TABLE Categories + ( + id int, + name varchar(32) not null, + + primary key(id), + unique(name) + ); + +-- ====================================================================== + +CREATE TABLE Settings + ( + id int, + parentId int, + name varchar(32) not null, + value varchar(250), + + primary key(id), + + foreign key(parentId) references Settings(id) + ); + +-- ====================================================================== + +CREATE TABLE Languages + ( + id varchar(5), + name varchar(32) not null, + + primary key(id) + ); + +-- ====================================================================== + +CREATE TABLE Sources + ( + uuid varchar(250), + name varchar(250), + isLocal char(1) default 'y', + + primary key(uuid) + ); + +-- ====================================================================== + +CREATE TABLE IsoLanguages + ( + id int, + code varchar(3) not null, + + primary key(id), + unique(code) + ); + +-- ====================================================================== + +CREATE TABLE IsoLanguagesDes + ( + idDes int, + langId varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references IsoLanguages(id), + foreign key(langId) references Languages(id) + ); + +-- ====================================================================== + +CREATE TABLE Regions + ( + id int, + north float not null, + south float not null, + west float not null, + east float not null, + + primary key(id) + ); + +-- ====================================================================== + +CREATE TABLE RegionsDes + ( + idDes int, + langId varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references Regions(id), + foreign key(langId) references Languages(id) + ); + +-- ====================================================================== + +CREATE TABLE Users + ( + id int, + username varchar(32) not null, + password varchar(40) not null, + surname varchar(32), + name varchar(32), + profile varchar(32) not null, + address varchar(128), + city varchar(128), + state varchar(32), + zip varchar(16), + country varchar(128), + email varchar(128), + organisation varchar(128), + kind varchar(16), + + primary key(id), + unique(username) + ); + +-- ====================================================================== + +CREATE TABLE Operations + ( + id int, + name varchar(32) not null, + reserved char(1) default 'n' not null, + + primary key(id) + ); + +-- ====================================================================== + +CREATE TABLE OperationsDes + ( + idDes int, + langId varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references Operations(id), + foreign key(langId) references Languages(id) + ); + +-- ====================================================================== + +CREATE TABLE 
Groups + ( + id int, + name varchar(32) not null, + description varchar(255), + email varchar(32), + referrer int, + + primary key(id), + unique(name), + + foreign key(referrer) references Users(id) + ); + +-- ====================================================================== + +CREATE TABLE GroupsDes + ( + idDes int, + langId varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references Groups(id), + foreign key(langId) references Languages(id) + ); + +-- ====================================================================== + +CREATE TABLE UserGroups + ( + userId int, + groupId int, + + primary key(userId,groupId), + + foreign key(userId) references Users(id), + foreign key(groupId) references Groups(id) + ); + +-- ====================================================================== + +CREATE TABLE CategoriesDes + ( + idDes int, + langId varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references Categories(id), + foreign key(langId) references Languages(id) + ); + +-- ====================================================================== + +CREATE TABLE Metadata + ( + id int, + uuid varchar(250) not null, + schemaId varchar(32) not null, + isTemplate char(1) default 'n' not null, + isHarvested char(1) default 'n' not null, + createDate varchar(24) not null, + changeDate varchar(24) not null, + data text not null, + source varchar(250) not null, + title varchar(255), + root varchar(255), + harvestUuid varchar(250) default null, + owner int not null, + groupOwner int default null, + harvestUri varchar(255) default null, + rating int default 0 not null, + popularity int default 0 not null, + displayorder int, + + primary key(id), + unique(uuid), + + foreign key(owner) references Users(id), + foreign key(groupOwner) references Groups(id) + ); + +CREATE INDEX MetadataNDX1 ON Metadata(uuid); +CREATE INDEX MetadataNDX2 ON Metadata(source); + +-- ====================================================================== + +CREATE TABLE MetadataCateg + ( + metadataId int, + categoryId int, + + primary key(metadataId,categoryId), + + foreign key(metadataId) references Metadata(id), + foreign key(categoryId) references Categories(id) + ); + +-- ====================================================================== + +CREATE TABLE OperationAllowed + ( + groupId int, + metadataId int, + operationId int, + + primary key(groupId,metadataId,operationId), + + foreign key(groupId) references Groups(id), + foreign key(metadataId) references Metadata(id), + foreign key(operationId) references Operations(id) + ); + +-- ====================================================================== + +CREATE TABLE MetadataRating + ( + metadataId int, + ipAddress varchar(32), + rating int not null, + + primary key(metadataId,ipAddress), + + foreign key(metadataId) references Metadata(id) + ); + +-- ====================================================================== + +CREATE TABLE spatialIndex + ( + fid int, + id varchar(250), + + primary key(fid) + + ); + +CREATE INDEX spatialIndexNDX1 ON spatialIndex(id); +SELECT AddGeometryColumn('spatialindex', 'the_geom', 4326, 'MULTIPOLYGON', 2 ); +CREATE INDEX spatialIndexNDX2 on spatialIndex USING GIST(the_geom); + +-- ====================================================================== + Index: gast/setup/db/Settings.ddf =================================================================== --- gast/setup/db/Settings.ddf (revision 5897) +++ gast/setup/db/Settings.ddf (working copy) 
@@ -59,6 +59,9 @@
 86 80 distinguishedNames
 87 80 userAttribs
+90 1 selectionmanager
+91 90 maxrecords 1000
+
 100 86 base dc=fao,dc=org
 101 86 users ou=people
@@ -108,4 +111,7 @@
 201 200 enable true
 210 1 localrating
-211 210 enable false
\ No newline at end of file
+211 210 enable false
+
+500 1 indexoptimizer
+501 500 interval 30
Index: web/geonetwork/xml/schemas/iso19139/extract-gml.xsl
===================================================================
--- web/geonetwork/xml/schemas/iso19139/extract-gml.xsl (revision 5897)
+++ web/geonetwork/xml/schemas/iso19139/extract-gml.xsl (working copy)
[XSLT markup in this hunk (@@ -12,31 +12,26 @@) was stripped in extraction and is not reproduced here; the visible part of the change also restores the file's trailing newline.]
Index: web/geonetwork/xml/schemas/dublin-core/extract-gml.xsl
===================================================================
--- web/geonetwork/xml/schemas/dublin-core/extract-gml.xsl (revision 5897)
+++ web/geonetwork/xml/schemas/dublin-core/extract-gml.xsl (working copy)
[XSLT markup in these hunks (@@ -3,15 +3,16 @@ and @@ -19,6 +20,7 @@) was stripped in extraction; the visible part of the change also restores the file's trailing newline.]
Index: web/geonetwork/xml/schemas/iso19110/extract-gml.xsl
===================================================================
--- web/geonetwork/xml/schemas/iso19110/extract-gml.xsl (revision 5897)
+++ web/geonetwork/xml/schemas/iso19110/extract-gml.xsl (working copy)
[XSLT markup in this hunk (@@ -4,4 +4,4 @@) was stripped in extraction; the visible part of the change restores the file's trailing newline.]
Index: web/geonetwork/xml/schemas/csw-record/extract-gml.xsl
===================================================================
--- web/geonetwork/xml/schemas/csw-record/extract-gml.xsl (revision 5897)
+++ web/geonetwork/xml/schemas/csw-record/extract-gml.xsl (working copy)
[XSLT markup in these hunks (@@ -18,11 +18,11 @@ and @@ -30,6 +30,7 @@) was stripped in extraction; the visible part of the change also restores the file's trailing newline.]
Index: web/geonetwork/xml/schemas/fgdc-std/extract-gml.xsl
===================================================================
--- web/geonetwork/xml/schemas/fgdc-std/extract-gml.xsl (revision 5897)
+++ web/geonetwork/xml/schemas/fgdc-std/extract-gml.xsl (working copy)
[XSLT markup in these hunks (@@ -13,10 +13,11 @@ and @@ -24,5 +25,6 @@) was stripped in extraction; the visible part of the change also restores the file's trailing newline.]
Index: web/geonetwork/xml/schemas/iso19115/extract-gml.xsl
===================================================================
--- web/geonetwork/xml/schemas/iso19115/extract-gml.xsl (revision 5897)
+++ web/geonetwork/xml/schemas/iso19115/extract-gml.xsl (working copy)
[XSLT markup in these hunks (@@ -16,10 +16,11 @@ and @@ -27,5 +28,6 @@) was stripped in extraction; the visible part of the change also restores the file's trailing newline.]
Index: web/geonetwork/xsl/admin.xsl
===================================================================
--- web/geonetwork/xsl/admin.xsl (revision 5897)
+++ web/geonetwork/xsl/admin.xsl (working copy)
[XHTML/XSLT markup in these hunks (@@ -152,7 +152,7 @@ and @@ -187,16 +187,18 @@) was stripped in extraction and is not reproduced here.]
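Note on the Settings.ddf rows added above: each row is "id parentId name [value]", mirroring the Settings table created in create-db-postgis.sql, and the parentId links are what turn the rows into the slash-separated paths used by config/Set.java and Geonetwork.init(). For example 501 -> 500 -> 1 resolves to system/indexoptimizer/interval = 30, assuming the pre-existing root row with id 1 is named "system", which is consistent with the system/... paths used elsewhere in this patch. The stand-alone snippet below only illustrates that resolution; it is not GeoNetwork's SettingManager.

import java.util.HashMap;
import java.util.Map;

/** Hypothetical, stand-alone illustration of how Settings.ddf rows
 *  (id, parentId, name, value) resolve to setting paths such as
 *  "system/indexoptimizer/interval". Not part of this patch. */
public class SettingsPathDemo
{
    static class Row
    {
        int id, parentId; String name, value;
        Row(int id, int parentId, String name, String value)
        { this.id = id; this.parentId = parentId; this.name = name; this.value = value; }
    }

    public static void main(String[] args)
    {
        Map<Integer, Row> rows = new HashMap<Integer, Row>();

        // assumed pre-existing root row, named "system" to match the paths in Set.java
        add(rows, new Row(1, 0, "system", null));

        // rows added by this patch in Settings.ddf
        add(rows, new Row(90, 1, "selectionmanager", null));
        add(rows, new Row(91, 90, "maxrecords", "1000"));
        add(rows, new Row(500, 1, "indexoptimizer", null));
        add(rows, new Row(501, 500, "interval", "30"));

        System.out.println(path(rows, 91)  + " = " + rows.get(91).value);   // system/selectionmanager/maxrecords = 1000
        System.out.println(path(rows, 501) + " = " + rows.get(501).value);  // system/indexoptimizer/interval = 30
    }

    static void add(Map<Integer, Row> rows, Row r) { rows.put(r.id, r); }

    /** Walks the parentId links up to the root to build the settings path. */
    static String path(Map<Integer, Row> rows, int id)
    {
        Row r = rows.get(id);
        Row parent = rows.get(r.parentId);
        return parent == null ? r.name : path(rows, r.parentId) + "/" + r.name;
    }
}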
Index: web/geonetwork/xsl/search-results-xhtml.xsl
===================================================================
--- web/geonetwork/xsl/search-results-xhtml.xsl (revision 5897)
+++ web/geonetwork/xsl/search-results-xhtml.xsl (working copy)
[XHTML/XSLT markup in this hunk (@@ -101,9 +101,19 @@) was stripped in extraction and is not reproduced here.]
Index: web/geonetwork/xsl/config/config.xsl
===================================================================
--- web/geonetwork/xsl/config/config.xsl (revision 5897)
+++ web/geonetwork/xsl/config/config.xsl (working copy)
[XHTML/XSLT markup in these hunks (@@ -45,6 +45,8 @@ and @@ -170,6 +172,32 @@) was stripped in extraction and is not reproduced here.]
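Taken together, the service changes in this patch follow one refactoring: instead of calling dm.indexMetadata(...) once per record inside the service, they collect the work into a subclass of the new org.fao.geonet.kernel.MetadataIndexerProcessor and run it through processWithFastIndexing() (see ServiceMetadataReindexer, MassiveXslMetadataReindexer and ImportMetadataReindexer above, plus the MassiveOpsMetadataReindexer referenced from the massive-update services). As a rough guide for writing a similar service, the sketch below shows the minimal shape of such a subclass. The class name and id list are hypothetical; the constructor, process()/processWithFastIndexing() contract and the dm field are taken from the code above, while whatever processWithFastIndexing() does around process() internally is not part of this diff and is assumed to be handled by the base class.

import java.util.List;

import jeeves.resources.dbms.Dbms;

import org.fao.geonet.kernel.DataManager;
import org.fao.geonet.kernel.MetadataIndexerProcessor;

/** Hypothetical example (not part of this patch): reindexes a list of
 *  metadata ids that a service has already collected. */
public class ExampleIdReindexer extends MetadataIndexerProcessor
{
    private final Dbms dbms;
    private final List<String> ids;

    public ExampleIdReindexer(DataManager dm, Dbms dbms, List<String> ids)
    {
        super(dm);          // the base class exposes the DataManager as 'dm'
        this.dbms = dbms;
        this.ids  = ids;
    }

    @Override
    public void process() throws Exception
    {
        for (String id : ids) {
            // same call used by ServiceMetadataReindexer above
            dm.indexMetadataGroup(dbms, id);
        }
    }
}

// Caller side, mirroring the services changed in this patch:
//
//   ExampleIdReindexer r = new ExampleIdReindexer(dm, dbms, ids);
//   r.processWithFastIndexing();

The presumable benefit is that the base class can set up and tear down indexing state once per batch rather than once per record, but that logic lives in MetadataIndexerProcessor itself and is outside this diff.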