Index: jeeves/src/jeeves/utils/Xml.java =================================================================== --- jeeves/src/jeeves/utils/Xml.java (revision 5814) +++ jeeves/src/jeeves/utils/Xml.java (working copy) @@ -67,6 +67,7 @@ import org.xml.sax.SAXParseException; import org.xml.sax.helpers.DefaultHandler; import net.sf.saxon.FeatureKeys; +import net.sf.saxon.Configuration; //============================================================================= @@ -220,6 +221,7 @@ transFact.setAttribute(FeatureKeys.VERSION_WARNING,false); transFact.setAttribute(FeatureKeys.LINE_NUMBERING,true); transFact.setAttribute(FeatureKeys.PRE_EVALUATE_DOC_FUNCTION,true); + transFact.setAttribute(FeatureKeys.RECOVERY_POLICY,Configuration.RECOVER_SILENTLY); // Add the following to get timing info on xslt transformations //transFact.setAttribute(FeatureKeys.TIMING,true); } catch (IllegalArgumentException e) { @@ -267,6 +269,7 @@ try { factory.setAttribute(FeatureKeys.VERSION_WARNING,false); factory.setAttribute(FeatureKeys.LINE_NUMBERING,true); + factory.setAttribute(FeatureKeys.RECOVERY_POLICY,Configuration.RECOVER_SILENTLY); } catch (IllegalArgumentException e) { System.out.println("WARNING: transformerfactory doesnt like saxon attributes!"); //e.printStackTrace(); Index: jeeves/src/jeeves/utils/XmlRequest.java =================================================================== --- jeeves/src/jeeves/utils/XmlRequest.java (revision 5814) +++ jeeves/src/jeeves/utils/XmlRequest.java (working copy) @@ -57,6 +57,7 @@ import org.apache.commons.httpclient.Credentials; import org.apache.commons.httpclient.UsernamePasswordCredentials; import org.apache.commons.httpclient.auth.AuthScope; +import org.apache.commons.httpclient.auth.AuthPolicy; //============================================================================= @@ -88,6 +89,11 @@ client.setState(state); client.getParams().setCookiePolicy(CookiePolicy.BROWSER_COMPATIBILITY); client.setHostConfiguration(config); + List authPrefs = 
new ArrayList(2); + authPrefs.add(AuthPolicy.DIGEST); + authPrefs.add(AuthPolicy.BASIC); + // This will exclude the NTLM authentication scheme + client.getParams().setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, authPrefs); } //--------------------------------------------------------------------------- Index: jeeves/src/jeeves/utils/Log.java =================================================================== --- jeeves/src/jeeves/utils/Log.java (revision 5814) +++ jeeves/src/jeeves/utils/Log.java (working copy) @@ -43,6 +43,7 @@ public static final String SCHEDULER = JEEVES+ ".scheduler"; public static final String RESOURCES = JEEVES+ ".resources"; public static final String DBMS = JEEVES+ ".dbms"; + public static final String DBMSPOOL = JEEVES+ ".dbmspool"; public class Dbms { Index: jeeves/src/jeeves/server/resources/ResourceManager.java =================================================================== --- jeeves/src/jeeves/server/resources/ResourceManager.java (revision 5814) +++ jeeves/src/jeeves/server/resources/ResourceManager.java (working copy) @@ -75,6 +75,16 @@ } //-------------------------------------------------------------------------- + /** Gets properties from the named resource provider + */ + + public Map getProps(String name) throws Exception + { + ResourceProvider provider = provManager.getProvider(name); + return provider.getProps(); + } + + //-------------------------------------------------------------------------- /** Closes all resources doing a commit */ Index: jeeves/src/jeeves/server/resources/ResourceProvider.java =================================================================== --- jeeves/src/jeeves/server/resources/ResourceProvider.java (revision 5814) +++ jeeves/src/jeeves/server/resources/ResourceProvider.java (working copy) @@ -23,6 +23,8 @@ package jeeves.server.resources; +import java.util.Map; + import org.jdom.Element; //============================================================================= @@ -40,6 +42,9 @@ /** 
Initializes the provider */ public void init(String name, Element config) throws Exception; + /** gets props from the provider */ + public Map getProps(); + /** Stops the provider */ public void end(); Index: jeeves/src/jeeves/resources/dbms/DbmsPool.java =================================================================== --- jeeves/src/jeeves/resources/dbms/DbmsPool.java (revision 5814) +++ jeeves/src/jeeves/resources/dbms/DbmsPool.java (working copy) @@ -23,12 +23,19 @@ package jeeves.resources.dbms; +import java.util.Collections; import java.util.Enumeration; import java.util.HashSet; import java.util.Hashtable; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + import jeeves.constants.Jeeves; import jeeves.server.resources.ResourceListener; import jeeves.server.resources.ResourceProvider; +import jeeves.utils.Log; + import org.jdom.Element; //============================================================================= @@ -43,11 +50,12 @@ private String name; private String user; private String passwd; + private String url; private int maxTries; private int maxWait; private long reconnectTime; - private HashSet hsListeners = new HashSet(); + private Set hsListeners = Collections.synchronizedSet(new HashSet()); //-------------------------------------------------------------------------- //--- @@ -64,8 +72,8 @@ user = config.getChildText(Jeeves.Res.Pool.USER); passwd = config.getChildText(Jeeves.Res.Pool.PASSWORD); + url = config.getChildText(Jeeves.Res.Pool.URL); String driver = config.getChildText(Jeeves.Res.Pool.DRIVER); - String url = config.getChildText(Jeeves.Res.Pool.URL); String size = config.getChildText(Jeeves.Res.Pool.POOL_SIZE); String maxt = config.getChildText(Jeeves.Res.Pool.MAX_TRIES); String maxw = config.getChildText(Jeeves.Res.Pool.MAX_WAIT); @@ -86,6 +94,16 @@ //-------------------------------------------------------------------------- + public Map getProps() { + Map result = new HashMap(); + result.put("name", name); + 
result.put("user", user); + result.put("password", passwd); + result.put("url", url); + return result; + } + + //-------------------------------------------------------------------------- public void end() { for(Enumeration e=htDbms.keys(); e.hasMoreElements();) @@ -111,10 +129,13 @@ for (int nTries = 0; nTries < maxTries; nTries++) { // try to get a free dbms + int i = 0; for(Enumeration e=htDbms.keys(); e.hasMoreElements();) { + Dbms dbms = (Dbms) e.nextElement(); Boolean locked = (Boolean) htDbms.get(dbms); + debug("DBMS Resource "+i+" is "+locked); if (!locked.booleanValue()) { @@ -131,7 +152,7 @@ if (currTime - lastConnTime >= reconnectTime) { - System.out.println("reconnecting: " + (currTime - lastConnTime) + ">=" + reconnectTime + " ms since last connection"); // FIXME + error("reconnecting: " + (currTime - lastConnTime) + ">=" + reconnectTime + " ms since last connection"); // FIXME // FIXME: what happens if it disconnects but is unable to connect again? dbms.disconnect(); @@ -139,6 +160,7 @@ } } + debug("SUCCESS: DBMS Resource "+i+" is not locked"); htDbms.put(dbms, new Boolean(true)); return dbms; } @@ -148,6 +170,7 @@ lastMessage = ex.getMessage(); } } + i++; } // wait MAX_WAIT msecs (but not after last try) if (nTries < maxTries - 1) @@ -166,12 +189,15 @@ public void close(Object resource) throws Exception { checkResource(resource); + debug("Committing and closing "+resource); ((Dbms) resource).commit(); htDbms.put(resource, new Boolean(false)); - for(ResourceListener l : hsListeners) - l.close(resource); + synchronized(hsListeners) { + for(ResourceListener l : hsListeners) + l.close(resource); + } } //-------------------------------------------------------------------------- @@ -181,6 +207,7 @@ public void abort(Object resource) throws Exception { checkResource(resource); + debug("Aborting "+resource); try { @@ -191,22 +218,24 @@ htDbms.put(resource, new Boolean(false)); } - for(ResourceListener l : hsListeners) - l.abort(resource); + 
synchronized(hsListeners) { + for(ResourceListener l : hsListeners) + l.abort(resource); + } } //-------------------------------------------------------------------------- public void addListener(ResourceListener l) { - hsListeners.add(l); + hsListeners.add(l); } //-------------------------------------------------------------------------- public void removeListener(ResourceListener l) { - hsListeners.remove(l); + hsListeners.remove(l); } //-------------------------------------------------------------------------- @@ -225,6 +254,11 @@ if (!locked.booleanValue()) throw new IllegalArgumentException("Resource not locked :"+resource); } + + private void debug (String message) { Log.debug (Log.DBMSPOOL, message); } + static void info (String message) { Log.info (Log.DBMSPOOL, message); } + private void warning(String message) { Log.warning(Log.DBMSPOOL, message); } + static void error (String message) { Log.error (Log.DBMSPOOL, message); } } //============================================================================= Index: jeeves/src/jeeves/resources/dbms/Dbms.java =================================================================== --- jeeves/src/jeeves/resources/dbms/Dbms.java (revision 5814) +++ jeeves/src/jeeves/resources/dbms/Dbms.java (working copy) @@ -83,10 +83,12 @@ public void connect(String username, String password) throws SQLException { - conn = DriverManager.getConnection(url, username, password); + String actualUrl = url; + if (actualUrl.contains("postgis")) actualUrl = actualUrl.replaceFirst("postgis","postgresql"); + conn = DriverManager.getConnection(actualUrl, username, password); conn.setAutoCommit(false); - if (url.toUpperCase().contains("ORACLE")) { + if (actualUrl.toUpperCase().contains("ORACLE")) { Log.debug(Log.RESOURCES,"ORACLE is using TRANSACTION_READ_COMMITTED"); conn.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); } else { Index: src/org/fao/geonet/kernel/DataManager.java 
=================================================================== --- src/org/fao/geonet/kernel/DataManager.java (revision 5814) +++ src/org/fao/geonet/kernel/DataManager.java (working copy) @@ -116,34 +116,39 @@ Log.debug(Geonet.DATA_MANAGER, "INDEX CONTENT:"); //DEBUG // index all metadata in DBMS if needed - for(int i = 0; i < list.size(); i++) - { - // get metadata - Element record = (Element) list.get(i); - String id = record.getChildText("id"); - - Log.debug(Geonet.DATA_MANAGER, "- record ("+ id +")"); //DEBUG - - Hashtable idxRec = (Hashtable)docs.get(id); - - // if metadata is not indexed index it - if (idxRec == null) - indexMetadata(dbms, id); - - // else, if indexed version is not the latest index it - else + startIndexGroup(); + try { + for(int i = 0; i < list.size(); i++) { - docs.remove(id); - - String lastChange = record.getChildText("changedate"); - String idxLastChange = (String)idxRec.get("_changeDate"); - - Log.debug(Geonet.DATA_MANAGER, "- lastChange: " + lastChange); //DEBUG - Log.debug(Geonet.DATA_MANAGER, "- idxLastChange: " + idxLastChange); //DEBUG - - if (force || !idxLastChange.equalsIgnoreCase(lastChange)) // date in index contains 't', date in DBMS contains 'T' - indexMetadata(dbms, id); + // get metadata + Element record = (Element) list.get(i); + String id = record.getChildText("id"); + + Log.debug(Geonet.DATA_MANAGER, "- record ("+ id +")"); //DEBUG + + Hashtable idxRec = (Hashtable)docs.get(id); + + // if metadata is not indexed index it + if (idxRec == null) + indexMetadataGroup(dbms, id); + + // else, if indexed version is not the latest index it + else + { + docs.remove(id); + + String lastChange = record.getChildText("changedate"); + String idxLastChange = (String)idxRec.get("_changeDate"); + + Log.debug(Geonet.DATA_MANAGER, "- lastChange: " + lastChange); //DEBUG + Log.debug(Geonet.DATA_MANAGER, "- idxLastChange: " + idxLastChange); //DEBUG + + if (force || !idxLastChange.equalsIgnoreCase(lastChange)) // date in index 
contains 't', date in DBMS contains 'T' + indexMetadataGroup(dbms, id); + } } + } finally { + endIndexGroup(); } Log.debug(Geonet.DATA_MANAGER, "INDEX SURPLUS:"); //DEBUG @@ -165,16 +170,42 @@ Log.debug(Geonet.DATA_MANAGER, "Indexing record (" + id + ")"); //DEBUG - indexMetadata(dbms, id, searchMan); + indexMetadata(dbms, id, searchMan, false); } //-------------------------------------------------------------------------- + public void startIndexGroup() throws Exception { + searchMan.startIndexGroup(); + } + + //-------------------------------------------------------------------------- + + public void endIndexGroup() throws Exception { + searchMan.endIndexGroup(); + } + + //-------------------------------------------------------------------------- + + public void indexMetadataGroup(Dbms dbms, String id) throws Exception { + Log.debug(Geonet.DATA_MANAGER, "Indexing record (" + id + ")"); //DEBUG + indexMetadata(dbms, id, searchMan, true); + } + + //-------------------------------------------------------------------------- + public static void indexMetadata(Dbms dbms, String id, SearchManager sm) throws Exception { + indexMetadata(dbms, id, sm, false); + } + + //-------------------------------------------------------------------------- + + public static void indexMetadata(Dbms dbms, String id, SearchManager sm, boolean indexGroup) throws Exception + { try { - indexMetadataI(dbms, id, sm); + indexMetadataI(dbms, id, sm, indexGroup); } catch (Exception e) { @@ -185,7 +216,7 @@ //-------------------------------------------------------------------------- - private static void indexMetadataI(Dbms dbms, String id, SearchManager sm) throws Exception + private static void indexMetadataI(Dbms dbms, String id, SearchManager sm, boolean indexGroup) throws Exception { Vector moreFields = new Vector(); @@ -255,7 +286,11 @@ moreFields.add(makeField("_cat", categoryName, true, true, false)); } - sm.index(schema, md, id, moreFields, isTemplate, title); + if (indexGroup) { + 
sm.indexGroup(schema, md, id, moreFields, isTemplate, title); + } else { + sm.index(schema, md, id, moreFields, isTemplate, title); + } } //-------------------------------------------------------------------------- @@ -630,34 +665,46 @@ public void setTemplate(Dbms dbms, int id, String isTemplate, String title) throws Exception { + setTemplateExt(dbms, id, isTemplate, title); + indexMetadata(dbms, Integer.toString(id)); + } + + //-------------------------------------------------------------------------- + + public void setTemplateExt(Dbms dbms, int id, String isTemplate, String title) throws Exception + { if (title == null) dbms.execute("UPDATE Metadata SET isTemplate=? WHERE id=?", isTemplate, id); else dbms.execute("UPDATE Metadata SET isTemplate=?, title=? WHERE id=?", isTemplate, title, id); - indexMetadata(dbms, Integer.toString(id)); } //-------------------------------------------------------------------------- public void setHarvested(Dbms dbms, int id, String harvestUuid) throws Exception { + setHarvestedExt(dbms, id, harvestUuid); + indexMetadata(dbms, Integer.toString(id)); + } + + //-------------------------------------------------------------------------- + + public void setHarvestedExt(Dbms dbms, int id, String harvestUuid) throws Exception + { String value = (harvestUuid != null) ? "y" : "n"; if (harvestUuid == null) { dbms.execute("UPDATE Metadata SET isHarvested=? WHERE id=?", value,id ); } else { dbms.execute("UPDATE Metadata SET isHarvested=?, harvestUuid=? WHERE id=?", value, harvestUuid, id); } - - indexMetadata(dbms, Integer.toString(id)); } //-------------------------------------------------------------------------- - public void setHarvested(Dbms dbms, int id, String harvestUuid, String harvestUri) throws Exception + public void setHarvestedExt(Dbms dbms, int id, String harvestUuid, String harvestUri) throws Exception { String value = (harvestUuid != null) ? 
"y" : "n"; String query = "UPDATE Metadata SET isHarvested=?, harvestUuid=?, harvestUri=? WHERE id=?"; dbms.execute(query, value, harvestUuid, harvestUri, id); - indexMetadata(dbms, Integer.toString(id)); } //--------------------------------------------------------------------------- @@ -2294,12 +2341,15 @@ } // add subtemplates + /* -- don't add as we need to investigate indexing for the fields + -- in the metadata table used here List subList = getSubtemplates(dbms, schema); if (subList != null) { Element subs = new Element(Edit.Info.Elem.SUBTEMPLATES); subs.addContent(subList); info.addContent(subs); } + */ return info; } Index: src/org/fao/geonet/kernel/csw/services/getrecords/CatalogSearcher.java =================================================================== --- src/org/fao/geonet/kernel/csw/services/getrecords/CatalogSearcher.java (revision 5814) +++ src/org/fao/geonet/kernel/csw/services/getrecords/CatalogSearcher.java (working copy) @@ -388,7 +388,7 @@ if (resultType == ResultType.RESULTS_WITH_SUMMARY) { summary = LuceneSearcher.makeSummary(_hits, _hits.length(), _summaryConfig, resultType.toString(), - Integer.MAX_VALUE, context.getLanguage()); + Integer.MAX_VALUE, Integer.MAX_VALUE, context.getLanguage()); summary.setName("Summary"); summary.setNamespace(Csw.NAMESPACE_GEONET); } Index: src/org/fao/geonet/kernel/csw/services/Transaction.java =================================================================== --- src/org/fao/geonet/kernel/csw/services/Transaction.java (revision 5814) +++ src/org/fao/geonet/kernel/csw/services/Transaction.java (working copy) @@ -107,30 +107,42 @@ if( transactionType.equals("insert") || transactionType.equals("update") || transactionType.equals("delete") ) { List mdList = transRequest.getChildren(); + GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); + DataManager dataMan = gc.getDataManager(); // insert to database, and get the number of inserted successful if( 
transactionType.equals("insert" ) ) { Iterator inIt = mdList.iterator(); - while (inIt.hasNext()){ - Element metadata = (Element) inIt.next().clone(); - boolean insertSuccess = insertTransaction( metadata, strFileIds, context); - if (insertSuccess) - totalInserted++; + dataMan.startIndexGroup(); + try { + while (inIt.hasNext()){ + Element metadata = (Element) inIt.next().clone(); + boolean insertSuccess = insertTransaction( metadata, strFileIds, context); + if (insertSuccess) + totalInserted++; + } + } finally { + dataMan.endIndexGroup(); } } // Update else if( transactionType.equals("update" ) ) { Iterator inIt = mdList.iterator(); - while (inIt.hasNext()){ - Element metadata = (Element) inIt.next().clone(); - if (!metadata.getName().equals("Constraint") && !metadata.getNamespace().equals(Csw.NAMESPACE_CSW)) - { - boolean updateSuccess = updateTransaction( transRequest, metadata, context ); - if (updateSuccess) - totalUpdated++; + dataMan.startIndexGroup(); + try { + while (inIt.hasNext()){ + Element metadata = (Element) inIt.next().clone(); + if (!metadata.getName().equals("Constraint") && !metadata.getNamespace().equals(Csw.NAMESPACE_CSW)) + { + boolean updateSuccess = updateTransaction( transRequest, metadata, context ); + if (updateSuccess) + totalUpdated++; + } } + } finally { + dataMan.endIndexGroup(); } } // Delete @@ -236,14 +248,14 @@ if( id == null ) return false; - dataMan.indexMetadata(dbms, id); - - fileIds.add( uuid ); - // --- Insert category if requested if (!"_none_".equals(category)) dataMan.setCategory(dbms, id, category); + dataMan.indexMetadataGroup(dbms, id); + + fileIds.add( uuid ); + dbms.commit(); return true; @@ -294,6 +306,8 @@ throw new NoApplicableCodeEx("User not allowed to update this metadata("+id+")."); dataMan.updateMetadataExt(dbms, id, xml, changeDate); + dataMan.indexMetadataGroup(dbms, id); + bReturn = true; break; } Index: src/org/fao/geonet/kernel/search/SearchManager.java 
=================================================================== --- src/org/fao/geonet/kernel/search/SearchManager.java (revision 5814) +++ src/org/fao/geonet/kernel/search/SearchManager.java (working copy) @@ -42,6 +42,7 @@ import javax.naming.Context; import javax.naming.InitialContext; +import jeeves.exceptions.JeevesException; import jeeves.resources.dbms.Dbms; import jeeves.utils.Log; import jeeves.utils.Util; @@ -60,6 +61,7 @@ import org.apache.lucene.index.TermEnum; import org.apache.lucene.search.Filter; import org.apache.lucene.store.Directory; + import org.fao.geonet.constants.Geonet; import org.fao.geonet.csw.common.Csw; import org.fao.geonet.csw.common.exceptions.NoApplicableCodeEx; @@ -75,12 +77,15 @@ import org.fao.geonet.kernel.search.spatial.SpatialIndexWriter; import org.fao.geonet.kernel.search.spatial.TouchesFilter; import org.fao.geonet.kernel.search.spatial.WithinFilter; + +import org.geotools.data.DataStore; import org.geotools.data.DefaultTransaction; import org.geotools.data.FeatureSource; import org.geotools.data.Transaction; import org.geotools.gml3.GMLConfiguration; import org.geotools.xml.Configuration; import org.geotools.xml.Parser; + import org.jdom.Element; import com.k_int.IR.Searchable; @@ -115,15 +120,18 @@ private LoggingContext _cat; private Searchable _hssSearchable; private Spatial _spatial; + private LuceneIndexWriterFactory _indexWriter; //----------------------------------------------------------------------------- /** * @param appPath * @param luceneDir + * @param summaryConfigXmlFile + * @param dataStore * @throws Exception */ - public SearchManager(String appPath, String luceneDir, String summaryConfigXmlFile) throws Exception + public SearchManager(String appPath, String luceneDir, String summaryConfigXmlFile, DataStore dataStore) throws Exception { _summaryConfig = Xml.loadStream(new FileInputStream(new File(appPath,summaryConfigXmlFile))); _stylesheetsDir = new File(appPath, SEARCH_STYLESHEETS_DIR_PATH); @@ 
-134,12 +142,11 @@ _luceneDir = new File(luceneDir+ "/nonspatial"); - if (!_luceneDir.isAbsolute()) - _luceneDir = new File(appPath + luceneDir+ "/nonspatial"); + if (!_luceneDir.isAbsolute()) _luceneDir = new File(appPath + luceneDir+ "/nonspatial"); - _luceneDir.getParentFile().mkdirs(); + _luceneDir.getParentFile().mkdirs(); - _spatial = new Spatial(_luceneDir.getParent() + "/spatial"); + _spatial = new Spatial(dataStore); // Define the default Analyzer _analyzer = new PerFieldAnalyzerWrapper(new StandardAnalyzer()); @@ -200,9 +207,7 @@ private void initLucene(String appPath, String luceneDir) throws Exception { - //--- the lucene dir cannot be inside the CVS so it is better to create - // it here - setupIndex(false); // RGFIX: check if this is correct + setupIndex(false); } //----------------------------------------------------------------------------- @@ -284,10 +289,45 @@ * @param title * @throws Exception */ - public synchronized void index(String type, Element metadata, String id, - List moreFields, String isTemplate, String title) throws Exception + public synchronized void index(String type, Element metadata, String id, List moreFields, String isTemplate, String title) throws Exception { + Document doc = buildIndexDocument(type, metadata, id, moreFields, isTemplate, title); + Log.debug(Geonet.INDEX_ENGINE, "Opening Writer from index"); + _indexWriter.openWriter(); + try { + _indexWriter.addDocument(doc); + } finally { + Log.debug(Geonet.INDEX_ENGINE, "Closing Writer from index"); + _indexWriter.closeWriter(); + } + + _spatial.writer().index(_schemasDir.getPath(), type, id, metadata); + } + + public void startIndexGroup() throws Exception { + Log.debug(Geonet.INDEX_ENGINE, "Opening Writer from startIndexGroup"); + _indexWriter.openWriter(); + } + + public synchronized void indexGroup(String type, Element metadata, String id, List moreFields, String isTemplate, String title) throws Exception + { + Document doc = buildIndexDocument(type, metadata, id, 
moreFields, isTemplate, title); + _indexWriter.addDocument(doc); + + _spatial.writer().index(_schemasDir.getPath(), type, id, metadata); + } + + public void endIndexGroup() throws Exception { + Log.debug(Geonet.INDEX_ENGINE, "Closing Writer from endIndexGroup"); + _indexWriter.closeWriter(); + } + + private Document buildIndexDocument(String type, Element metadata, String id, List moreFields, String isTemplate, String title) throws Exception + { + + Log.debug(Geonet.INDEX_ENGINE, "Deleting "+id+" from index"); delete("_id", id); + Log.debug(Geonet.INDEX_ENGINE, "Finished Delete"); Element xmlDoc; @@ -322,16 +362,7 @@ + Xml.getString(xmlDoc)); Document doc = newDocument(xmlDoc); - - IndexWriter writer = new IndexWriter(_luceneDir, _analyzer, false); - try { - writer.addDocument(doc); - lazyOptimize(writer); - } finally { - writer.close(); - _indexReader = getIndexReader(); - } - _spatial.writer().index(_schemasDir.getPath(), type, id, metadata); + return doc; } /** @@ -381,13 +412,15 @@ public synchronized void delete(String fld, String txt) throws Exception { // possibly remove old document - IndexReader indexReader = IndexReader.open(_luceneDir); + Log.debug(Geonet.INDEX_ENGINE, "Opening Writer from delete"); + _indexWriter.openWriter(); try { - _spatial.writer().delete(txt); - indexReader.deleteDocuments(new Term(fld, txt)); + _indexWriter.deleteDocuments(new Term(fld, txt)); } finally { - indexReader.close(); + Log.debug(Geonet.INDEX_ENGINE, "Closing Writer from delete"); + _indexWriter.closeWriterNoOptimize(); } + _spatial.writer().delete(txt); } //-------------------------------------------------------------------------------- @@ -443,7 +476,7 @@ .getAbsolutePath(); return Xml.transform(xml, styleSheet); } catch (Exception e) { - Log.error(Geonet.SEARCH_ENGINE, + Log.error(Geonet.INDEX_ENGINE, "Indexing stylesheet contains errors : " + e.getMessage()); throw e; } @@ -459,7 +492,7 @@ .getAbsolutePath(); return Xml.transform(xml, styleSheetPath); } catch 
(Exception e) { - Log.error(Geonet.SEARCH_ENGINE, + Log.error(Geonet.INDEX_ENGINE, "Search stylesheet contains errors : " + e.getMessage()); throw e; } @@ -504,21 +537,21 @@ indexReader.close(); badIndex = false; } catch (Exception e) { - Log.error(Geonet.SEARCH_ENGINE, + Log.error(Geonet.INDEX_ENGINE, "Exception while opening lucene index, going to rebuild it: " + e.getMessage()); } } // if rebuild forced or bad index then rebuild index if (rebuild || badIndex) { - Log.error(Geonet.SEARCH_ENGINE, "Rebuilding lucene index"); - if (_spatial != null) - _spatial.writer().reset(); + Log.error(Geonet.INDEX_ENGINE, "Rebuilding lucene index"); + if (_spatial != null) _spatial.writer().reset(); IndexWriter writer = new IndexWriter(_luceneDir, _analyzer, true); writer.close(); } - _indexReader = IndexReader.open(_luceneDir); + _indexReader = IndexReader.open(_luceneDir); + _indexWriter = new LuceneIndexWriterFactory(_luceneDir, _analyzer); } /* @@ -530,11 +563,14 @@ */ public boolean rebuildIndex(DataManager dataMan, Dbms dbms) { try { + if (_indexWriter.isOpen()) { + throw new Exception("Cannot rebuild index while it is being updated - please wait till later"); + } setupIndex(true); dataMan.init(dbms, true); return true; } catch (Exception e) { - Log.error(Geonet.SEARCH_ENGINE, + Log.error(Geonet.INDEX_ENGINE, "Exception while rebuilding lucene index, going to rebuild it: " + e.getMessage()); return false; @@ -585,49 +621,6 @@ //-------------------------------------------------------------------------------- - private static final long TIME_BETWEEN_OPTS = 1000; // time between two optimizations in ms - private static final int UPDTATES_BETWEEN_OPTS = 10; // number of updates between two optimizations - - private long lastOptTime = 0; // time since last optimization - private int updateCount = UPDTATES_BETWEEN_OPTS - 1; // number of updates since last uptimization - private boolean optimizing = false; // true iff optimization is in progress - private Object mutex = new 
Object(); // RGFIX: check concurrent access from multiple servlets - /** - * lazy optimization: optimize index if - * at least TIME_BETWEEN_OPTS time passed or - * at least UPDTATES_BETWEEN_OPTS updates were performed - * since last optimization - * @param writer - * @throws Exception - */ - private void lazyOptimize(IndexWriter writer) - throws Exception - { - if (optimizing) return; - - boolean doOptimize; - synchronized (mutex) - { - if (System.currentTimeMillis() - lastOptTime < TIME_BETWEEN_OPTS - && ++updateCount < UPDTATES_BETWEEN_OPTS) - doOptimize = false; - else - { - doOptimize = true; - optimizing = true; - updateCount = 0; - } - } - if (doOptimize) - { - // System.out.println("**** OPTIMIZING"); // DEBUG - - writer.optimize(); - lastOptTime = System.currentTimeMillis(); - optimizing = false; - } - } - public Spatial getSpatial() { return _spatial; @@ -636,6 +629,7 @@ public class Spatial { + private final DataStore _datastore; private static final long TIME_BETWEEN_SPATIAL_COMMITS = 10000; private final Map> _types; { @@ -667,7 +661,6 @@ } _types = Collections.unmodifiableMap(types); } - private final String _appPath; private final Transaction _transaction; private final Timer _timer; private final Parser _gmlParser; @@ -675,23 +668,16 @@ private SpatialIndexWriter _writer; private Committer _committerTask; - public Spatial(String appPath) throws Exception + public Spatial(DataStore dataStore) throws Exception { _lock = new ReentrantLock(); - _appPath = appPath; + _datastore = dataStore; _transaction = new DefaultTransaction("SpatialIndexWriter"); _timer = new Timer(true); _gmlParser = new Parser(new GMLConfiguration()); boolean rebuildIndex = false; - // This must be before createWriter because createWriter will create - // the file - // and therefore the test will not be worthwhile - if (!SpatialIndexWriter.createDataStoreFile(appPath).exists()) { - Log.error(Geonet.SEARCH_ENGINE, "Rebuild index because spatial index does not exist."); - rebuildIndex 
= true; - } - rebuildIndex = createWriter(appPath); + rebuildIndex = createWriter(_datastore); if (rebuildIndex) { setupIndex(true); }else{ @@ -703,16 +689,21 @@ addShutdownHook(); } - private boolean createWriter(String appPath) throws IOException + private boolean createWriter(DataStore datastore) throws IOException { boolean rebuildIndex; try { - _writer = new SpatialIndexWriter(appPath, _gmlParser, + _writer = new SpatialIndexWriter(datastore, _gmlParser, _transaction, _lock); rebuildIndex = _writer.getFeatureSource().getSchema() == null; } catch (Exception e) { - if (_writer != null) - _writer.delete(); + String exceptionString = Xml.getString(JeevesException.toElement(e)); + Log.warning(Geonet.SPATIAL, "Failure to make _writer, maybe a problem but might also not be an issue:"+exceptionString); + try { + _writer.reset(); + } catch (Exception e1) { + Log.error(Geonet.SPATIAL, "Unable to call reset on Spatial writer"); + } rebuildIndex = true; } return rebuildIndex; @@ -792,7 +783,7 @@ private SpatialIndexWriter writerNoLocking() throws Exception { if (_writer == null) { - _writer = new SpatialIndexWriter(_appPath, _gmlParser, + _writer = new SpatialIndexWriter(_datastore, _gmlParser, _transaction, _lock); } return _writer; Index: src/org/fao/geonet/kernel/search/LuceneSearcher.java =================================================================== --- src/org/fao/geonet/kernel/search/LuceneSearcher.java (revision 5814) +++ src/org/fao/geonet/kernel/search/LuceneSearcher.java (working copy) @@ -97,6 +97,7 @@ private Element _elSummary; private int _maxSummaryKeys; + private int _maxRecordsInKeywordSummary; private int _numHits; private String _resultType; private String _language; @@ -242,6 +243,11 @@ private void computeQuery(ServiceContext srvContext, Element request, ServiceConfig config) throws Exception { + + String sMaxRecordsInKeywordSummary = request.getChildText("maxRecordsInKeywordSummary"); + if (sMaxRecordsInKeywordSummary == null) 
sMaxRecordsInKeywordSummary = config.getValue("maxRecordsInKeywordSummary", "1000"); + _maxRecordsInKeywordSummary = Integer.parseInt(sMaxRecordsInKeywordSummary); + String sMaxSummaryKeys = request.getChildText("maxSummaryKeys"); if (sMaxSummaryKeys == null) sMaxSummaryKeys = config.getValue("maxSummaryKeys", "10"); _maxSummaryKeys = Integer.parseInt(sMaxSummaryKeys); @@ -341,7 +347,7 @@ Log.debug(Geonet.SEARCH_ENGINE, "Hits found : "+ _hits.length()); if (_elSummary == null) { - _elSummary = makeSummary(_hits, getSize(), _summaryConfig, _resultType, _maxSummaryKeys, _language); + _elSummary = makeSummary(_hits, getSize(), _summaryConfig, _resultType, _maxSummaryKeys, _maxRecordsInKeywordSummary, _language); } setValid(true); @@ -509,12 +515,13 @@ //-------------------------------------------------------------------------------- - public static Element makeSummary(Hits hits, int count, Element summaryConfig, String resultType, int maxSummaryKeys, String langCode) throws Exception + public static Element makeSummary(Hits hits, int count, Element summaryConfig, String resultType, int maxSummaryKeys, int maxRecordsInKeywordSummary, String langCode) throws Exception { Element elSummary = new Element("summary"); elSummary.setAttribute("count", count+""); elSummary.setAttribute("type", "local"); + elSummary.setAttribute("hitsusedforkeywordstats", Math.min(maxRecordsInKeywordSummary, count)+""); Element resultTypeConfig = summaryConfig.getChild("def").getChild(resultType); List elements = resultTypeConfig.getChildren(); @@ -542,7 +549,7 @@ SortOption sortOption = SortOption.parse(order); SummaryComparator summaryComparator = new SummaryComparator(sortOption, Type.parse(type), langCode, summaryConfig.getChild("typeConfig")); - summarize(elSummary, hits, key, count, plural, name, summaryComparator, max); + summarize(elSummary, hits, key, Math.min(maxRecordsInKeywordSummary, count), plural, name, summaryComparator, max); } return elSummary; } @@ -552,6 +559,7 @@ { HashMap 
summary = new HashMap(); + Log.info(Geonet.SEARCH_ENGINE,"Processing "+count+" hits for keyword summary"); for (int i = 0; i < count; i++) { Document doc = phits.doc(i); String hits[] = doc.getValues(indexKey); Index: src/org/fao/geonet/kernel/search/LuceneIndexWriterFactory.java =================================================================== --- src/org/fao/geonet/kernel/search/LuceneIndexWriterFactory.java (revision 0) +++ src/org/fao/geonet/kernel/search/LuceneIndexWriterFactory.java (revision 0) @@ -0,0 +1,129 @@ +package org.fao.geonet.kernel.search; + +import java.io.File; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import jeeves.utils.Log; + +import org.apache.lucene.document.Document; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.analysis.PerFieldAnalyzerWrapper; + +import org.fao.geonet.constants.Geonet; + +/* Lucene only allows one IndexWriter to be open at a time. + However, multiple threads can use this single IndexWriter. + This class manages a global IndexWriter and uses reference counting to + determine when it can be closed. */ + +public class LuceneIndexWriterFactory { + + protected IndexWriter writer; + protected int count; + private File _luceneDir; + private PerFieldAnalyzerWrapper _analyzer; + + + public LuceneIndexWriterFactory(File luceneDir, PerFieldAnalyzerWrapper analyzer) { + _luceneDir = luceneDir; + _analyzer = analyzer; + } + + public synchronized void openWriter() throws Exception { + if (count == 0) { + writer = new IndexWriter(_luceneDir, _analyzer, false); + writer.setRAMBufferSizeMB(48.0d); + // this figure has probably not been investigated enough - not much + // difference between 48, 128 and 512.... 
+ } + count++; + Log.info(Geonet.INDEX_ENGINE, "Opening Indexwriter, ref count "+count+" ram in use "+writer.ramSizeInBytes()+" docs buffered "+writer.numRamDocs()); + } + + public void closeWriterNoOptimize() throws Exception { + + // lower reference count, close if count reaches zero + synchronized(this) { + if (count > 0) { + count--; + Log.info(Geonet.INDEX_ENGINE, "Closing Indexwriter without lazyOptimize, ref count "+count+" ram in use "+writer.ramSizeInBytes()+" docs buffered "+writer.numRamDocs()); + if (count==0) writer.close(); + } + } + } + + public synchronized boolean isOpen() { + if (count > 0) return true; + else return false; + } + + public void closeWriter() throws Exception { + lazyOptimize(); + + // lower reference count, close if count reaches zero + synchronized(this) { + if (count > 0) { + count--; + Log.info(Geonet.INDEX_ENGINE, "Closing Indexwriter, ref count "+count+" ram in use "+writer.ramSizeInBytes()+" docs buffered "+writer.numRamDocs()); + if (count==0) writer.close(); + } + } + } + + public void addDocument(Document doc) throws Exception { + writer.addDocument(doc); + } + + public void deleteDocuments(Term term) throws Exception { + writer.deleteDocuments(term); + } + + //---------------------------------------------------------------------------- + + // time between two optimizations in ms + private static final long TIME_BETWEEN_OPTS = 10000; + // number of updates between two optimizations + private static final int UPDTATES_BETWEEN_OPTS = 1000; + + // time since last optimization + private long lastOptTime = 0; + // number of updates since last optimization + private int updateCount = UPDTATES_BETWEEN_OPTS - 1; + // true iff optimization is in progress + private boolean optimizing = false; + // RGFIX: check concurrent access from multiple servlets + private Object mutex = new Object(); + /** + * lazy optimization: optimize index if + * at least TIME_BETWEEN_OPTS time passed or + * at least UPDTATES_BETWEEN_OPTS updates were
performed + * since last optimization + * + * @throws Exception + */ + private void lazyOptimize() throws Exception { + if (optimizing) return; + + boolean doOptimize; + synchronized (mutex) { + + if (System.currentTimeMillis() - lastOptTime < TIME_BETWEEN_OPTS && ++updateCount < UPDTATES_BETWEEN_OPTS) { + doOptimize = false; + } else { + doOptimize = true; + optimizing = true; + updateCount = 0; + } + } + if (doOptimize) { + writer.optimize(); + lastOptTime = System.currentTimeMillis(); + optimizing = false; + } + } + +} Index: src/org/fao/geonet/kernel/search/spatial/SpatialIndexWriter.java =================================================================== --- src/org/fao/geonet/kernel/search/spatial/SpatialIndexWriter.java (revision 5814) +++ src/org/fao/geonet/kernel/search/spatial/SpatialIndexWriter.java (working copy) @@ -26,46 +26,37 @@ import java.io.File; import java.io.IOException; import java.io.StringReader; -import java.net.URI; -import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Level; +import javax.xml.parsers.ParserConfigurationException; + import jeeves.utils.Log; import jeeves.utils.Xml; -import org.geotools.data.DefaultTransaction; +import org.fao.geonet.constants.Geonet; +import org.geotools.data.DataStore; import org.geotools.data.FeatureSource; import org.geotools.data.FeatureStore; import org.geotools.data.Transaction; -import org.geotools.data.shapefile.indexed.IndexType; -import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; import org.geotools.factory.CommonFactoryFinder; import org.geotools.factory.GeoTools; -import org.geotools.feature.AttributeTypeBuilder; import org.geotools.feature.FeatureCollection; import org.geotools.feature.FeatureCollections; import org.geotools.feature.FeatureIterator; -import 
org.geotools.feature.SchemaException; import org.geotools.feature.simple.SimpleFeatureBuilder; -import org.geotools.feature.simple.SimpleFeatureTypeBuilder; -import org.geotools.referencing.CRS; -import org.geotools.referencing.crs.DefaultGeographicCRS; -import org.geotools.xml.Configuration; import org.geotools.xml.Parser; import org.jdom.Element; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; -import org.opengis.feature.type.AttributeDescriptor; import org.opengis.filter.Filter; import org.opengis.filter.FilterFactory2; import org.opengis.filter.identity.FeatureId; -import org.opengis.referencing.crs.CoordinateReferenceSystem; +import org.xml.sax.SAXException; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; @@ -77,41 +68,26 @@ /** * This class is responsible for extracting geographic information from metadata * and writing that information to a storage mechanism. - * + * * @author jeichar */ +@SuppressWarnings("unchecked") public class SpatialIndexWriter { static final String IDS_ATTRIBUTE_NAME = "id"; static final String GEOM_ATTRIBUTE_NAME = "the_geom"; - static final String SPATIAL_INDEX_FILENAME = "spatialIndex"; + static final String SPATIAL_INDEX_TYPENAME = "spatialindex"; private static final int MAX_WRITES_IN_TRANSACTION = 5000; private final Parser _parser; private final Transaction _transaction; - private final SimpleFeatureType _schema; - private final File _file; private final Lock _lock; - private FeatureStore _featureStore; + private FeatureStore _featureStore; private STRtree _index; private static int _writes; - public SpatialIndexWriter(String indexBasedir, Configuration config) - throws Exception - { - this(indexBasedir, new Parser(config), - new DefaultTransaction("SpatialIndexWriter"), - new ReentrantLock()); - } - - public SpatialIndexWriter(String indexBasedir, Configuration config, - Transaction transaction) throws Exception - { - 
this(indexBasedir, new Parser(config), transaction, new ReentrantLock()); - } - - public SpatialIndexWriter(String indexBasedir, Parser parser, + public SpatialIndexWriter(DataStore datastore, Parser parser, Transaction transaction, Lock lock) throws Exception { // Note: The Configuration takes a long time to create so it is worth @@ -120,18 +96,16 @@ _parser = parser; _parser.setStrict(false); _parser.setValidating(false); - _file = createDataStoreFile(indexBasedir); _transaction = transaction; - _schema = createSchema(); - _featureStore = createFeatureStore(); + _featureStore = createFeatureStore(datastore); _featureStore.setTransaction(_transaction); } /** * Add a metadata record to the index - * + * * @param schemasDir * the base directory that contains the different metadata * schemas @@ -146,16 +120,21 @@ _lock.lock(); try { _index = null; - Geometry[] extractGeometriesFrom = extractGeometriesFrom( + Geometry geometry = extractGeometriesFrom( schemasDir, type, metadata, _parser); - if (extractGeometriesFrom.length > 0) { + if (geometry != null) { FeatureCollection features = FeatureCollections.newCollection(); - for (Geometry geometry : extractGeometriesFrom) { - Object[] data = { geometry, id }; - features.add(SimpleFeatureBuilder.build(_schema, data, - SimpleFeatureBuilder.createDefaultFeatureId())); + Object[] data; + SimpleFeatureType schema = _featureStore.getSchema(); + if(schema.getDescriptor(0) == schema.getGeometryDescriptor()){ + data = new Object[] { geometry, id }; + } else { + data = new Object[] { id, geometry}; } + + features.add(SimpleFeatureBuilder.build(schema, data, + SimpleFeatureBuilder.createDefaultFeatureId())); _featureStore.addFeatures(features); @@ -187,20 +166,6 @@ } } - public void delete() throws IOException - { - _lock.lock(); - try { - if (_featureStore.getTransaction() != Transaction.AUTO_COMMIT) { - close(); - } - _index = null; - delete(_file.getParentFile()); - } finally { - _lock.unlock(); - } - } - public FeatureSource 
getFeatureSource() { return _featureStore; @@ -249,7 +214,7 @@ if (_index==null) { populateIndex(); } - return _index; + return _index; } finally { _lock.unlock(); } @@ -261,13 +226,11 @@ */ public void reset() throws Exception { - _lock.lock(); try { _featureStore.setTransaction(Transaction.AUTO_COMMIT); _index=null; - delete(_file); - _featureStore=createFeatureStore(); + _featureStore.removeFeatures(Filter.INCLUDE); _featureStore.setTransaction(_transaction); }finally{ _lock.unlock(); @@ -278,10 +241,9 @@ * Extracts a Geometry Collection from metadata default visibility for * testing access. */ - static MultiPolygon[] extractGeometriesFrom(String schemasDir, String type, + static MultiPolygon extractGeometriesFrom(String schemasDir, String type, Element metadata, Parser parser) throws Exception { - org.geotools.util.logging.Logging.getLogger("org.geotools.xml") .setLevel(Level.SEVERE); File schemaDir = new File(schemasDir, type); @@ -289,41 +251,66 @@ .getAbsolutePath(); Element transform = Xml.transform(metadata, sSheet); if (transform.getChildren().size() == 0) { - return new MultiPolygon[0]; + return null; } - String gml = Xml.getString(transform); + List allPolygons = new ArrayList(); + for (Element geom : (List)transform.getChildren()) { + String gml = Xml.getString(geom); - try { - Object value = parser.parse(new StringReader(gml)); - if (value instanceof HashMap) { - HashMap map = (HashMap) value; - List geoms = new ArrayList(); - for (Object entry : map.values()) { - addToList(geoms, entry); + try { + MultiPolygon jts = parseGml(parser, gml); + for (int i = 0; i < jts.getNumGeometries(); i++) { + allPolygons.add((Polygon) jts.getGeometryN(i)); } - if( geoms.isEmpty() ){ - return new MultiPolygon[0]; - } else if( geoms.size()>1 ){ - GeometryFactory factory = geoms.get(0).getFactory(); - return new MultiPolygon[]{factory.createMultiPolygon(geoms.toArray(new Polygon[0]))}; - } else { - return new MultiPolygon[]{toMultiPolygon(geoms.get(0))}; - } + } catch 
(Exception e) { + Log.error(Geonet.INDEX_ENGINE, "Failed to convert gml to jts object: "+gml+"\n\t"+e.getMessage()); + e.printStackTrace(); + // continue + } + } - } else if (value == null) { - return new MultiPolygon[0]; + if( allPolygons.isEmpty()){ + return null; + }else{ + try { + Polygon[] array = new Polygon[allPolygons.size()]; + GeometryFactory geometryFactory = allPolygons.get(0).getFactory(); + return geometryFactory.createMultiPolygon(allPolygons.toArray(array)); + } catch (Exception e) { + Log.error(Geonet.INDEX_ENGINE, "Failed to create a MultiPolygon from: "+allPolygons); + // continue + return null; + } + } + } + + public static MultiPolygon parseGml(Parser parser, String gml) throws IOException, SAXException, + ParserConfigurationException + { + Object value = parser.parse(new StringReader(gml)); + if (value instanceof HashMap) { + HashMap map = (HashMap) value; + List geoms = new ArrayList(); + for (Object entry : map.values()) { + addToList(geoms, entry); + } + if( geoms.isEmpty() ){ + return null; + } else if( geoms.size()>1 ){ + GeometryFactory factory = geoms.get(0).getFactory(); + return factory.createMultiPolygon(geoms.toArray(new Polygon[0])); } else { - return new MultiPolygon[] { toMultiPolygon((Geometry) value) }; + return toMultiPolygon(geoms.get(0)); } - } catch (Exception e) { - return new MultiPolygon[0]; + + } else if (value == null) { + return null; + } else { + return toMultiPolygon((Geometry) value); } } - /** - * @see SpatialIndexWriter#extractGeometriesFrom(String, String, Element) - */ - private static void addToList(List geoms, Object entry) + public static void addToList(List geoms, Object entry) { if (entry instanceof Polygon) { geoms.add((Polygon) entry); @@ -335,16 +322,6 @@ } } - private static void delete(File parentFile) - { - if (parentFile.isDirectory()) { - for (File file : parentFile.listFiles()) { - delete(file); - } - } - parentFile.delete(); - } - private void populateIndex() throws IOException { _index = new 
STRtree(); @@ -362,63 +339,25 @@ } } - private SimpleFeatureType createSchema() throws SchemaException + private FeatureStore createFeatureStore(DataStore datastore) throws Exception { - SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); - AttributeDescriptor geomDescriptor = new AttributeTypeBuilder().crs( - DefaultGeographicCRS.WGS84).binding(MultiPolygon.class) - .buildDescriptor("the_geom"); - builder.setName(SPATIAL_INDEX_FILENAME); - builder.add(geomDescriptor); - builder.add(IDS_ATTRIBUTE_NAME, String.class); - return builder.buildFeatureType(); - } + DataStore ds = datastore; - public static File createDataStoreFile(String indexBasedir) - { - return new File(indexBasedir + "/" + SPATIAL_INDEX_FILENAME + ".shp"); - // return new File(indexBasedir + "/" + SPATIAL_INDEX_FILENAME + "/" - // + SPATIAL_INDEX_FILENAME + ".h2"); + return (FeatureStore) ds.getFeatureSource(SPATIAL_INDEX_TYPENAME); } - /** - * Create a Shapefile datastore in WGS84. - * - * @return - * @throws Exception - */ - private FeatureStore createFeatureStore() throws Exception + public static MultiPolygon toMultiPolygon(Geometry geometry) { - _file.getParentFile().mkdirs(); - - IndexedShapefileDataStore ds = new IndexedShapefileDataStore(_file - .toURI().toURL(), new URI("http://geonetwork.org"), false, - true, IndexType.NONE, Charset.defaultCharset()); - CoordinateReferenceSystem crs = CRS.decode("EPSG:4326"); - - if (crs != null) { - ds.forceSchemaCRS(crs); - } - - if (!_file.exists()) { - ds.createSchema(_schema); - } - - return (FeatureStore) ds.getFeatureSource(_schema.getTypeName()); - } - - private static MultiPolygon toMultiPolygon(Geometry geometry) - { - if (geometry instanceof Polygon) { + if (geometry instanceof Polygon) { Polygon polygon = (Polygon) geometry; - + return geometry.getFactory().createMultiPolygon( new Polygon[] { polygon }); }else if (geometry instanceof MultiPolygon) { return (MultiPolygon) geometry; } String message = geometry.getClass()+" cannot be 
converted to a polygon. Check Metadata"; - Log.error("SpatialIndexWriter", message); + Log.error(Geonet.INDEX_ENGINE, message); throw new IllegalArgumentException(message); } Index: src/org/fao/geonet/kernel/search/spatial/SpatialFilter.java =================================================================== --- src/org/fao/geonet/kernel/search/spatial/SpatialFilter.java (revision 5814) +++ src/org/fao/geonet/kernel/search/spatial/SpatialFilter.java (working copy) @@ -53,7 +53,7 @@ SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); builder.add(SpatialIndexWriter.GEOM_ATTRIBUTE_NAME, Geometry.class,DefaultGeographicCRS.WGS84); builder.setDefaultGeometry(SpatialIndexWriter.GEOM_ATTRIBUTE_NAME); - builder.setName(SpatialIndexWriter.SPATIAL_INDEX_FILENAME); + builder.setName(SpatialIndexWriter.SPATIAL_INDEX_TYPENAME); FEATURE_TYPE = builder.buildFeatureType(); } @@ -207,4 +207,4 @@ "createGeomFilter must be overridden if createFilter is not overridden"); } -} \ No newline at end of file +} Index: src/org/fao/geonet/kernel/mef/Importer.java =================================================================== --- src/org/fao/geonet/kernel/mef/Importer.java (revision 5814) +++ src/org/fao/geonet/kernel/mef/Importer.java (working copy) @@ -53,6 +53,12 @@ public static List doImport(final Element params, final ServiceContext context, File mefFile, final String stylePath) throws Exception { + return doImport(params, context, mefFile, stylePath, false); + } + + public static List doImport(final Element params, + final ServiceContext context, File mefFile, final String stylePath, + final boolean indexGroup) throws Exception { final GeonetContext gc = (GeonetContext) context .getHandlerContext(Geonet.CONTEXT_NAME); final DataManager dm = gc.getDataManager(); @@ -304,8 +310,8 @@ dbms.execute("UPDATE Metadata SET popularity=? 
WHERE id=?", new Integer(popularity), iId); - dm.setTemplate(dbms, iId, isTemplate, null); - dm.setHarvested(dbms, iId, null); + dm.setTemplateExt(dbms, iId, isTemplate, null); + dm.setHarvestedExt(dbms, iId, null); String pubDir = Lib.resource.getDir(context, "public", id .get(index)); @@ -323,7 +329,11 @@ else addOperations(dm, dbms, privileges, id.get(index), groupId); - dm.indexMetadata(dbms, id.get(index)); + if (indexGroup) { + dm.indexMetadataGroup(dbms, id.get(index)); + } else { + dm.indexMetadata(dbms, id.get(index)); + } } // -------------------------------------------------------------------- Index: src/org/fao/geonet/kernel/mef/MEFLib.java =================================================================== --- src/org/fao/geonet/kernel/mef/MEFLib.java (revision 5814) +++ src/org/fao/geonet/kernel/mef/MEFLib.java (working copy) @@ -148,6 +148,11 @@ V2 } + public static List doImportIndexGroup(Element params, ServiceContext context, File mefFile, String stylePath) throws Exception { + return Importer.doImport(params, context, mefFile, stylePath, true); + } + + // -------------------------------------------------------------------------- public static List doImport(Element params, ServiceContext context, File mefFile, String stylePath) throws Exception { Index: src/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java (revision 5814) +++ src/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java (working copy) @@ -144,32 +144,37 @@ //----------------------------------------------------------------------- //--- insert/update new metadata - for(RecordInfo ri : records) - { - result.totalMetadata++; - - if (!dataMan.existsSchema(ri.schema)) + dataMan.startIndexGroup(); + try { + for(RecordInfo ri : records) { - log.debug(" - Metadata skipped due to unknown schema. 
uuid:"+ ri.uuid - +", schema:"+ ri.schema); - result.unknownSchema++; - } - else - { - String id = dataMan.getMetadataId(dbms, ri.uuid); + result.totalMetadata++; - // look up value of localrating/enabled - GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); - SettingManager settingManager = gc.getSettingManager(); - boolean localRating = settingManager.getValueAsBool("system/localrating/enabled", false); - - if (id == null) { - addMetadata(ri, localRating); + if (!dataMan.existsSchema(ri.schema)) + { + log.debug(" - Metadata skipped due to unknown schema. uuid:"+ ri.uuid + +", schema:"+ ri.schema); + result.unknownSchema++; } - else { - updateMetadata(ri, id, localRating); + else + { + String id = dataMan.getMetadataId(dbms, ri.uuid); + + // look up value of localrating/enabled + GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); + SettingManager settingManager = gc.getSettingManager(); + boolean localRating = settingManager.getValueAsBool("system/localrating/enabled", false); + + if (id == null) { + addMetadata(ri, localRating); + } + else { + updateMetadata(ri, id, localRating); + } } } + } finally { + dataMan.endIndexGroup(); } log.info("End of alignment for : "+ params.name); @@ -273,8 +278,8 @@ int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, isTemplate, null); - dataMan.setHarvested(dbms, iId, params.uuid); + dataMan.setTemplateExt(dbms, iId, isTemplate, null); + dataMan.setHarvestedExt(dbms, iId, params.uuid); if(!localRating) { String rating = general.getChildText("rating"); @@ -295,7 +300,7 @@ addPrivileges(id, info.getChild("privileges")); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.addedMetadata++; return id; @@ -553,7 +558,7 @@ addPrivileges(id, info.getChild("privileges")); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); } 
//-------------------------------------------------------------------------- Index: src/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java (revision 5814) +++ src/org/fao/geonet/kernel/harvest/harvester/geonet20/Aligner.java (working copy) @@ -100,38 +100,43 @@ //----------------------------------------------------------------------- //--- insert/update new metadata - for(Iterator i=mdList.iterator(); i.hasNext(); ) - { - Element info = ((Element) i.next()).getChild("info", Edit.NAMESPACE); + dataMan.startIndexGroup(); + try { + for(Iterator i=mdList.iterator(); i.hasNext(); ) + { + Element info = ((Element) i.next()).getChild("info", Edit.NAMESPACE); - String remoteId = info.getChildText("id"); - String remoteUuid= info.getChildText("uuid"); - String schema = info.getChildText("schema"); - String changeDate= info.getChildText("changeDate"); + String remoteId = info.getChildText("id"); + String remoteUuid= info.getChildText("uuid"); + String schema = info.getChildText("schema"); + String changeDate= info.getChildText("changeDate"); - this.result.totalMetadata++; + this.result.totalMetadata++; - log.debug("Obtained remote id="+ remoteId +", changeDate="+ changeDate); + log.debug("Obtained remote id="+ remoteId +", changeDate="+ changeDate); - if (!dataMan.existsSchema(schema)) - { - log.debug(" - Skipping unsupported schema : "+ schema); - this.result.schemaSkipped++; - } - else - { - String id = dataMan.getMetadataId(dbms, remoteUuid); + if (!dataMan.existsSchema(schema)) + { + log.debug(" - Skipping unsupported schema : "+ schema); + this.result.schemaSkipped++; + } + else + { + String id = dataMan.getMetadataId(dbms, remoteUuid); - if (id == null) id = addMetadata(siteId, info); + if (id == null) id = addMetadata(siteId, info); else updateMetadata(siteId, info, id); - dbms.commit(); + dbms.commit(); - //--- maybe the 
metadata was unretrievable + //--- maybe the metadata was unretrievable - if (id != null) - dataMan.indexMetadata(dbms, id); + if (id != null) + dataMan.indexMetadataGroup(dbms, id); + } } + } finally { + dataMan.endIndexGroup(); } log.info("End of alignment for site-id="+ siteId); Index: src/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java (revision 5814) +++ src/org/fao/geonet/kernel/harvest/harvester/csw/Aligner.java (working copy) @@ -145,14 +145,19 @@ //----------------------------------------------------------------------- //--- insert/update new metadata - for(RecordInfo ri : records) - { - result.totalMetadata++; + dataMan.startIndexGroup(); + try { + for(RecordInfo ri : records) + { + result.totalMetadata++; - String id = dataMan.getMetadataId(dbms, ri.uuid); + String id = dataMan.getMetadataId(dbms, ri.uuid); - if (id == null) addMetadata(ri); + if (id == null) addMetadata(ri); else updateMetadata(ri, id); + } + } finally { + dataMan.endIndexGroup(); } log.info("End of alignment for : "+ params.name); @@ -190,14 +195,14 @@ int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, params.uuid); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, params.uuid); addPrivileges(id); addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.addedMetadata++; } @@ -291,7 +296,7 @@ addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.updatedMetadata++; } } Index: src/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java (revision 
5814) +++ src/org/fao/geonet/kernel/harvest/harvester/localfilesystem/LocalFilesystemHarvester.java (working copy) @@ -183,48 +183,54 @@ List idsForHarvestingResult = new ArrayList(); //----------------------------------------------------------------------- //--- insert/update new metadata - for(String xmlFile : results) { - result.total++; - Element xml = null; - try { - System.out.println("reading file: " + xmlFile); - xml = Xml.loadFile(xmlFile); - } catch (JDOMException e) { // JDOM problem - System.out.println("Error loading XML from file " + xmlFile +", ignoring"); - e.printStackTrace(); - result.badFormat++; - continue; // skip this one - } catch (Exception e) { // some other error - System.out.println("Error retrieving XML from file " + xmlFile +", ignoring"); - e.printStackTrace(); - result.unretrievable++; - continue; // skip this one - } - String schema = dataMan.autodetectSchema(xml); - if(schema == null) { - result.unknownSchema++; - } - else { - String uuid = Common.retrieveUUID(xml, schema); - System.out.println("extracted uuid: " + uuid); - if(uuid == null) { + + dataMan.startIndexGroup(); + try { + for(String xmlFile : results) { + result.total++; + Element xml = null; + try { + System.out.println("reading file: " + xmlFile); + xml = Xml.loadFile(xmlFile); + } catch (JDOMException e) { // JDOM problem + System.out.println("Error loading XML from file " + xmlFile +", ignoring"); + e.printStackTrace(); result.badFormat++; + continue; // skip this one + } catch (Exception e) { // some other error + System.out.println("Error retrieving XML from file " + xmlFile +", ignoring"); + e.printStackTrace(); + result.unretrievable++; + continue; // skip this one } + String schema = dataMan.autodetectSchema(xml); + if(schema == null) { + result.unknownSchema++; + } else { - String id = dataMan.getMetadataId(dbms, uuid); - if (id == null) { - System.out.println("adding new metadata"); - id = addMetadata(xml, uuid, dbms, schema, localGroups, localCateg); - 
result.added++; + String uuid = Common.retrieveUUID(xml, schema); + System.out.println("extracted uuid: " + uuid); + if(uuid == null) { + result.badFormat++; } else { - System.out.println("updating existing metadata, id is: " + id); - updateMetadata(xml, id, dbms, schema, localGroups, localCateg); - result.updated++; + String id = dataMan.getMetadataId(dbms, uuid); + if (id == null) { + System.out.println("adding new metadata"); + id = addMetadata(xml, uuid, dbms, schema, localGroups, localCateg); + result.added++; + } + else { + System.out.println("updating existing metadata, id is: " + id); + updateMetadata(xml, id, dbms, schema, localGroups, localCateg); + result.updated++; + } + idsForHarvestingResult.add(id); } - idsForHarvestingResult.add(id); } } + } finally { + dataMan.endIndexGroup(); } if(!params.nodelete) { // @@ -255,7 +261,7 @@ addCategories(id, localCateg, dbms); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); } @@ -278,14 +284,14 @@ createDate, uuid, 1, null); int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, source); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, source); addPrivileges(id, localGroups, dbms); addCategories(id, localCateg, dbms); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); return id; } Index: src/org/fao/geonet/kernel/harvest/harvester/arcsde/ArcSDEHarvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/arcsde/ArcSDEHarvester.java (revision 5814) +++ src/org/fao/geonet/kernel/harvest/harvester/arcsde/ArcSDEHarvester.java (working copy) @@ -168,43 +168,48 @@ List idsForHarvestingResult = new ArrayList(); //----------------------------------------------------------------------- //--- insert/update metadata - for(String metadata : metadataList) { - result.total++; - // create 
JDOM element from String-XML - Element metadataElement = Xml.loadString(metadata, false); - // transform ESRI output to ISO19115 - Element iso19115 = Xml.transform(metadataElement, ARC_TO_ISO19115_TRANSFORMER_LOCATION); - // transform ISO19115 to ISO19139 - Element iso19139 = Xml.transform(iso19115, ISO19115_TO_ISO19139_TRANSFORMER_LOCATION); + dataMan.startIndexGroup(); + try { + for(String metadata : metadataList) { + result.total++; + // create JDOM element from String-XML + Element metadataElement = Xml.loadString(metadata, false); + // transform ESRI output to ISO19115 + Element iso19115 = Xml.transform(metadataElement, ARC_TO_ISO19115_TRANSFORMER_LOCATION); + // transform ISO19115 to ISO19139 + Element iso19139 = Xml.transform(iso19115, ISO19115_TO_ISO19139_TRANSFORMER_LOCATION); - String schema = dataMan.autodetectSchema(iso19139); - if(schema == null) { - result.unknownSchema++; - } - // the xml is recognizable iso19139 format - else { - String uuid = Common.retrieveUUID(iso19139, schema); - if(uuid == null) { - result.badFormat++; + String schema = dataMan.autodetectSchema(iso19139); + if(schema == null) { + result.unknownSchema++; } + // the xml is recognizable iso19139 format else { - // - // add / update the metadata from this harvesting result - // - String id = dataMan.getMetadataId(dbms, uuid); - if (id == null) { - System.out.println("adding new metadata"); - id = addMetadata(iso19139, uuid, dbms, schema, localGroups, localCateg); - result.added++; + String uuid = Common.retrieveUUID(iso19139, schema); + if(uuid == null) { + result.badFormat++; } else { - System.out.println("updating existing metadata, id is: " + id); - updateMetadata(iso19139, id, dbms, schema, localGroups, localCateg); - result.updated++; + // + // add / update the metadata from this harvesting result + // + String id = dataMan.getMetadataId(dbms, uuid); + if (id == null) { + System.out.println("adding new metadata"); + id = addMetadata(iso19139, uuid, dbms, schema, localGroups, 
localCateg); + result.added++; + } + else { + System.out.println("updating existing metadata, id is: " + id); + updateMetadata(iso19139, id, dbms, schema, localGroups, localCateg); + result.updated++; + } + idsForHarvestingResult.add(id); } - idsForHarvestingResult.add(id); } } + } finally { + dataMan.endIndexGroup(); } // // delete locally existing metadata from the same source if they were @@ -232,7 +237,7 @@ addCategories(id, localCateg, dbms); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); } /** * Inserts a metadata into the database. Lucene index is updated after insertion. @@ -253,14 +258,14 @@ createDate, uuid, 1, null); int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, source); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, source); addPrivileges(id, localGroups, dbms); addCategories(id, localCateg, dbms); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); return id; } @@ -358,4 +363,4 @@ public int badFormat; public int doesNotValidate; } -} \ No newline at end of file +} Index: src/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java (revision 5814) +++ src/org/fao/geonet/kernel/harvest/harvester/webdav/Harvester.java (working copy) @@ -108,15 +108,21 @@ } //----------------------------------------------------------------------- //--- insert/update new metadata - for(RemoteFile rf : files) { - result.total++; - String id = localUris.getID(rf.getPath()); - if (id == null) { - addMetadata(rf); + + dataMan.startIndexGroup(); + try { + for(RemoteFile rf : files) { + result.total++; + String id = localUris.getID(rf.getPath()); + if (id == null) { + addMetadata(rf); + } + else { + updateMetadata(rf, id); + } } - else { - updateMetadata(rf, 
id); - } + } finally { + dataMan.endIndexGroup(); } log.info("End of alignment for : "+ params.name); } @@ -158,14 +164,14 @@ int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, params.uuid, rf.getPath()); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, params.uuid, rf.getPath()); addPrivileges(id); addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.added++; } @@ -285,7 +291,7 @@ dbms.execute("DELETE FROM MetadataCateg WHERE metadataId=?", Integer.parseInt(id)); addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.updated++; } } @@ -324,4 +330,4 @@ public boolean isMoreRecentThan(String localDate); } -//============================================================================= \ No newline at end of file +//============================================================================= Index: src/org/fao/geonet/kernel/harvest/harvester/ogcwxs/Harvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/ogcwxs/Harvester.java (revision 5814) +++ src/org/fao/geonet/kernel/harvest/harvester/ogcwxs/Harvester.java (working copy) @@ -298,15 +298,20 @@ List layers = xp.selectNodes(capa); log.info(" - Number of layers, featureTypes or Coverages found : " + layers.size()); + + dataMan.startIndexGroup(); + try { + for (Element layer : layers) { + WxSLayerRegistry s = addLayerMetadata (layer, capa); + if (s != null) + layersRegistry.add(s); + } + } finally { + dataMan.endIndexGroup(); + } - for (Element layer : layers) { - WxSLayerRegistry s = addLayerMetadata (layer, capa); - if (s != null) - layersRegistry.add(s); - } - - // Update ISO19119 for data/service links creation (ie. operatesOn element) - // The editor will support that but it will make quite heavy XML. 
+ // Update ISO19119 for data/service links creation (ie. operatesOn element) + // The editor will support that but it will make quite heavy XML. md = addOperatesOnUuid (md, layersRegistry); } @@ -325,11 +330,11 @@ addPrivileges(id); addCategories(id); + dataMan.setHarvestedExt(dbms, iId, params.uuid, params.url); dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, params.uuid, params.url); dbms.commit(); - //dataMan.indexMetadata(dbms, id); setHarvested update the index + //dataMan.indexMetadata(dbms, id); setTemplate update the index result.added ++; @@ -595,11 +600,11 @@ dataMan.setCategory (dbms, reg.id, params.datasetCategory); log.debug(" - Set Harvested."); - dataMan.setHarvested(dbms, iId, params.uuid, params.url); // FIXME : harvestUuid should be a MD5 string + dataMan.setHarvestedExt(dbms, iId, params.uuid, params.url); // FIXME : harvestUuid should be a MD5 string dbms.commit(); - //dataMan.indexMetadata(dbms, reg.id); setHarvested update the index + dataMan.indexMetadataGroup(dbms, reg.id); try { // Load bbox info for later use (eg. 
WMS thumbnails creation) Index: src/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java =================================================================== --- src/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java (revision 5814) +++ src/org/fao/geonet/kernel/harvest/harvester/oaipmh/Harvester.java (working copy) @@ -203,14 +203,19 @@ //----------------------------------------------------------------------- //--- insert/update new metadata - for(RecordInfo ri : records) - { - result.total++; + dataMan.startIndexGroup(); + try { + for(RecordInfo ri : records) + { + result.total++; - String id = localUuids.getID(ri.id); + String id = localUuids.getID(ri.id); - if (id == null) addMetadata(t, ri); + if (id == null) addMetadata(t, ri); else updateMetadata(t, ri, id); + } + } finally { + dataMan.endIndexGroup(); } log.info("End of alignment for : "+ params.name); @@ -252,14 +257,14 @@ int iId = Integer.parseInt(id); - dataMan.setTemplate(dbms, iId, "n", null); - dataMan.setHarvested(dbms, iId, params.uuid); + dataMan.setTemplateExt(dbms, iId, "n", null); + dataMan.setHarvestedExt(dbms, iId, params.uuid); addPrivileges(id); addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.added++; } @@ -453,7 +458,7 @@ addCategories(id); dbms.commit(); - dataMan.indexMetadata(dbms, id); + dataMan.indexMetadataGroup(dbms, id); result.updated++; } } Index: src/org/fao/geonet/services/metadata/MassiveNewOwner.java =================================================================== --- src/org/fao/geonet/services/metadata/MassiveNewOwner.java (revision 5814) +++ src/org/fao/geonet/services/metadata/MassiveNewOwner.java (working copy) @@ -130,8 +130,14 @@ dbms.commit(); // -- reindex metadata - for (int mdId : metadata) { - dm.indexMetadata(dbms, Integer.toString(mdId)); + context.info("Re-indexing metadata"); + dm.startIndexGroup(); + try { + for (int mdId : metadata) { + dm.indexMetadataGroup(dbms, 
Integer.toString(mdId)); + } + } finally { + dm.endIndexGroup(); } // -- for the moment just return the sizes - we could return the ids Index: src/org/fao/geonet/services/metadata/XslProcessing.java =================================================================== --- src/org/fao/geonet/services/metadata/XslProcessing.java (revision 5814) +++ src/org/fao/geonet/services/metadata/XslProcessing.java (working copy) @@ -49,6 +49,7 @@ import org.fao.geonet.kernel.MdInfo; import org.fao.geonet.kernel.SelectionManager; import org.fao.geonet.services.Utils; +import org.fao.geonet.util.ISODate; import org.jdom.Element; //============================================================================= @@ -95,7 +96,7 @@ try { Element processedMetadata = process(id, process, _appPath, params, - context, metadata, notFound, notOwner, notProcessFound); + context, metadata, notFound, notOwner, notProcessFound, false); if (processedMetadata == null) { throw new BadParameterEx("Processing failed", "Not found:" + notFound.size() + @@ -131,7 +132,7 @@ public static Element process(String id, String process, String appPath, Element params, ServiceContext context, Set metadata, Set notFound, Set notOwner, - Set notProcessFound) throws Exception { + Set notProcessFound, boolean useIndexGroup) throws Exception { GeonetContext gc = (GeonetContext) context .getHandlerContext(Geonet.CONTEXT_NAME); DataManager dataMan = gc.getDataManager(); @@ -173,9 +174,14 @@ xslParameter); // --- save metadata and return status - dataMan.updateMetadata(context.getUserSession(), dbms, id, - processedMetadata, false, null, context.getLanguage()); + dataMan.updateMetadataExt(dbms, id, processedMetadata, new ISODate().toString()); + if (useIndexGroup) { + dataMan.indexMetadataGroup(dbms, id); + } else { + dataMan.indexMetadata(dbms, id); + } + metadata.add(new Integer(id)); return processedMetadata; Index: src/org/fao/geonet/services/metadata/MassiveUpdatePrivileges.java 
=================================================================== --- src/org/fao/geonet/services/metadata/MassiveUpdatePrivileges.java (revision 5814) +++ src/org/fao/geonet/services/metadata/MassiveUpdatePrivileges.java (working copy) @@ -131,8 +131,14 @@ } //--- reindex metadata - for (int mdId : metadata) { - dm.indexMetadata(dbms, Integer.toString(mdId)); + context.info("Re-indexing metadata"); + dm.startIndexGroup(); + try { + for (int mdId : metadata) { + dm.indexMetadata(dbms, Integer.toString(mdId)); + } + } finally { + dm.endIndexGroup(); } // -- for the moment just return the sizes - we could return the ids Index: src/org/fao/geonet/services/metadata/MassiveUpdateCategories.java =================================================================== --- src/org/fao/geonet/services/metadata/MassiveUpdateCategories.java (revision 5814) +++ src/org/fao/geonet/services/metadata/MassiveUpdateCategories.java (working copy) @@ -116,8 +116,14 @@ dbms.commit(); //--- reindex metadata - for (int mdId : metadata) { - dm.indexMetadata(dbms, Integer.toString(mdId)); + context.info("Re-indexing metadata"); + dm.startIndexGroup(); + try { + for (int mdId : metadata) { + dm.indexMetadataGroup(dbms, Integer.toString(mdId)); + } + } finally { + dm.endIndexGroup(); } // -- for the moment just return the sizes - we could return the ids Index: src/org/fao/geonet/services/metadata/MassiveXslProcessing.java =================================================================== --- src/org/fao/geonet/services/metadata/MassiveXslProcessing.java (revision 5814) +++ src/org/fao/geonet/services/metadata/MassiveXslProcessing.java (working copy) @@ -109,13 +109,17 @@ SelectionManager sm = SelectionManager.getManager(session); synchronized(sm.getSelection("metadata")) { - for (Iterator iter = sm.getSelection("metadata").iterator(); iter - .hasNext();) { - String uuid = (String) iter.next(); - String id = dataMan.getMetadataId(dbms, uuid); - context.info("Processing metadata with id:" + 
id); + dataMan.startIndexGroup(); + try { + for (Iterator iter = sm.getSelection("metadata").iterator(); iter.hasNext();) { + String uuid = (String) iter.next(); + String id = dataMan.getMetadataId(dbms, uuid); + context.info("Processing metadata with id:" + id); - XslProcessing.process(id, process, _appPath, params, context, metadata, notFound, notOwner, notProcessFound); + XslProcessing.process(id, process, _appPath, params, context, metadata, notFound, notOwner, notProcessFound, true); + } + } finally { + dataMan.endIndexGroup(); } } Index: src/org/fao/geonet/services/metadata/ImportFromDir.java =================================================================== --- src/org/fao/geonet/services/metadata/ImportFromDir.java (revision 5814) +++ src/org/fao/geonet/services/metadata/ImportFromDir.java (working copy) @@ -152,6 +152,9 @@ private int standardImport(Element params, ServiceContext context) throws Exception { + GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); + DataManager dm = gc.getDataManager(); + String dir = Util.getParam(params, Params.DIR); File files[] = new File(dir).listFiles(mdFilter); @@ -159,8 +162,13 @@ if (files == null) throw new Exception("Directory not found: " + dir); - for(int i=0; i props, String luceneDir) throws Exception { + String url = props.get("url"); + String user = props.get("user"); + String passwd = props.get("password"); + System.out.println("Props received "+url+":"+user+":"+passwd); + + DataStore ds = null; + try { + if (url.contains("postgis")) { + ds = createPostgisDatastore(user, passwd, url); + } else if (url.contains("oracle")) { + ds = createOracleDatastore(user, passwd, url); + } + } catch (Exception e) { + logger.error("Failed to create datastore for "+url+". 
Will use shapefile instead."); + logger.error(e.getMessage()); + e.printStackTrace(); + } + + if (ds != null) return ds; + else return createShapefileDatastore(luceneDir); + } + + private DataStore createPostgisDatastore(String user, String passwd, String url) throws Exception { + + String[] values = url.split("/"); + + Map params = new HashMap(); + params.put(PostgisDataStoreFactory.DBTYPE.key, PostgisDataStoreFactory.DBTYPE.sample); + params.put(PostgisDataStoreFactory.DATABASE.key, getDatabase(url, values)); + params.put(PostgisDataStoreFactory.USER.key, user); + params.put(PostgisDataStoreFactory.PASSWD.key, passwd); + params.put(PostgisDataStoreFactory.HOST.key, getHost(url, values)); + params.put(PostgisDataStoreFactory.PORT.key, getPort(url, values)); + logger.info("Connecting using "+params); + + PostgisDataStoreFactory factory = new PostgisDataStoreFactory(); + DataStore ds = factory.createDataStore(params); + logger.info("NOTE: Using POSTGIS for spatial index"); + + return ds; + } + + private DataStore createOracleDatastore(String user, String passwd, String url) throws Exception { + + String[] values = url.split(":"); +/* + Map params = new HashMap(); + params.put(OracleDataStoreFactory.DBTYPE.key, OracleDataStoreFactory.DBTYPE.sample); + params.put(OracleDataStoreFactory.DATABASE.key, getDatabase(url, values)); + params.put(OracleDataStoreFactory.USER.key, user); + params.put(OracleDataStoreFactory.PASSWD.key, passwd); + params.put(OracleDataStoreFactory.HOST.key, getHost(url, values)); + params.put(OracleDataStoreFactory.PORT.key, getPort(url, values)); + + OracleDataStoreFactory factory = new OracleDataStoreFactory(); + DataStore ds = factory.createDataStore(params); + + return ds; +*/ + return null; + } + + private DataStore createShapefileDatastore(String luceneDir) throws Exception { + File file = new File(luceneDir + "/spatial/" + SPATIAL_INDEX_FILENAME + ".shp"); + file.getParentFile().mkdirs(); + logger.info("Creating shapefile 
"+file.getAbsolutePath()); + IndexedShapefileDataStore ds = new IndexedShapefileDataStore(file.toURI().toURL(), new URI("http://geonetwork.org"), false, true, IndexType.NONE, Charset.defaultCharset()); + CoordinateReferenceSystem crs = CRS.decode("EPSG:4326"); + + if (crs != null) { + ds.forceSchemaCRS(crs); + } + + if (!file.exists()) { + SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder(); + AttributeDescriptor geomDescriptor = new AttributeTypeBuilder().crs(DefaultGeographicCRS.WGS84).binding(MultiPolygon.class).buildDescriptor("the_geom"); + builder.setName(SPATIAL_INDEX_FILENAME); + builder.add(geomDescriptor); + builder.add(IDS_ATTRIBUTE_NAME, String.class); + ds.createSchema(builder.buildFeatureType()); + } + + logger.info("NOTE: Using shapefile for spatial index, this can be slow for larger catalogs"); + return ds; + } + + private String getDatabase(String url, String[] values) throws Exception { + if (url.contains("postgis")) { + return values[3]; + } else if (url.contains("oracle")) { + return values[5]; + } else { + throw new Exception("Unknown database in url "+url); + } + } + + private String getHost(String url, String[] values) throws Exception { + if (url.contains("postgis")) { + String value = values[2]; + return value.substring(0,value.indexOf(":")); + } else if (url.contains("oracle")) { + return values[3]; + } else { + throw new Exception("Unknown database in url "+url); + } + } + + private String getPort(String url, String values[]) throws Exception { + if (url.contains("postgis")) { + String value = values[2]; + return value.substring(value.indexOf(":")+1); + } else if (url.contains("oracle")) { + return values[4]; + } else { + throw new Exception("Unknown database in url "+url); + } + } } //============================================================================= Index: src/org/fao/geonet/lib/NetLib.java =================================================================== --- src/org/fao/geonet/lib/NetLib.java (revision 5814) 
+++ src/org/fao/geonet/lib/NetLib.java (working copy) @@ -25,12 +25,15 @@ import java.net.MalformedURLException; import java.net.URL; +import java.util.ArrayList; +import java.util.List; import java.util.Properties; import jeeves.server.context.ServiceContext; import jeeves.utils.Log; import jeeves.utils.XmlRequest; +import org.apache.commons.httpclient.auth.AuthPolicy; import org.apache.commons.httpclient.auth.AuthScope; import org.apache.commons.httpclient.Credentials; import org.apache.commons.httpclient.HttpClient; @@ -91,7 +94,7 @@ req.setProxyPort(Integer.parseInt(port)); if (username.trim().length()!=0) { req.setProxyCredentials(username, password); - } + } } } } @@ -133,6 +136,11 @@ client.getState().setProxyCredentials(scope, cred); } + List authPrefs = new ArrayList(2); + authPrefs.add(AuthPolicy.DIGEST); + authPrefs.add(AuthPolicy.BASIC); + // This will exclude the NTLM authentication scheme + client.getParams().setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, authPrefs); } } } @@ -200,18 +208,14 @@ } - //--------------------------------------------------------------------------- public boolean isUrlValid(String url) { - try - { + try { new URL(url); return true; - } - catch (MalformedURLException e) - { + } catch (MalformedURLException e) { return false; } } Index: src/org/fao/gast/gui/panels/config/dbms/MainPanel.java =================================================================== --- src/org/fao/gast/gui/panels/config/dbms/MainPanel.java (revision 5814) +++ src/org/fao/gast/gui/panels/config/dbms/MainPanel.java (working copy) @@ -151,6 +151,7 @@ new OraclePanel(), new MySQLPanel(), new PostgresPanel(), + new PostgisPanel(), new GenericPanel() //--- this must be the last one }; Index: src/org/fao/gast/gui/panels/config/dbms/PostgisPanel.java =================================================================== --- src/org/fao/gast/gui/panels/config/dbms/PostgisPanel.java (revision 0) +++ src/org/fao/gast/gui/panels/config/dbms/PostgisPanel.java 
(revision 0) @@ -0,0 +1,172 @@ +//============================================================================== +//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== United Nations (FAO-UN), United Nations World Food Programme (WFP) +//=== and United Nations Environment Programme (UNEP) +//=== +//=== This program is free software; you can redistribute it and/or modify +//=== it under the terms of the GNU General Public License as published by +//=== the Free Software Foundation; either version 2 of the License, or (at +//=== your option) any later version. +//=== +//=== This program is distributed in the hope that it will be useful, but +//=== WITHOUT ANY WARRANTY; without even the implied warranty of +//=== MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +//=== General Public License for more details. +//=== +//=== You should have received a copy of the GNU General Public License +//=== along with this program; if not, write to the Free Software +//=== Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA +//=== +//=== Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, +//=== Rome - Italy. 
email: geonetwork@osgeo.org +//============================================================================== + +package org.fao.gast.gui.panels.config.dbms; + +import java.util.StringTokenizer; +import javax.swing.JLabel; +import javax.swing.JTextField; +import org.dlib.gui.FlexLayout; +import org.fao.gast.lib.Lib; +import org.fao.gast.localization.Messages; + +//============================================================================== + +public class PostgisPanel extends DbmsPanel +{ + //--------------------------------------------------------------------------- + //--- + //--- Constructor + //--- + //--------------------------------------------------------------------------- + + /** + * + */ + private static final long serialVersionUID = -9039785892791080773L; + + public PostgisPanel() + { + FlexLayout fl = new FlexLayout(3,5); + fl.setColProp(1, FlexLayout.EXPAND); + setLayout(fl); + + add("0,0", new JLabel(Messages.getString("server"))); + add("0,1", new JLabel(Messages.getString("port"))); + add("0,2", new JLabel(Messages.getString("database"))); + add("0,3", new JLabel(Messages.getString("username"))); + add("0,4", new JLabel(Messages.getString("password"))); + + add("1,0", txtServer); + add("1,1", txtPort); + add("1,2", txtDatabase); + add("1,3", txtUser); + add("1,4", txtPass); + + add("2,2", new JLabel("(REQ)")); + add("2,3", new JLabel("(REQ)")); + add("2,4", new JLabel("(REQ)")); + + txtPort.setToolTipText(Messages.getString("postgres.defaultPort")); + } + + //--------------------------------------------------------------------------- + //--- + //--- DbmsPanel methods + //--- + //--------------------------------------------------------------------------- + + public String getLabel() { return "PostGIS"; } + + //--------------------------------------------------------------------------- + + public boolean matches(String url) + { + return url.startsWith(PREFIX); + } + + //--------------------------------------------------------------------------- + 
//--- jdbc:postgresql::/ + //--- jdbc:postgresql: + + public void retrieve() + { + String url = Lib.config.getDbmsURL(); + + //--- cut prefix + url = url.substring(PREFIX.length()); + + String server = ""; + String port = ""; + String database = url; + + if (url.startsWith("//") && url.length() > 2) + { + StringTokenizer st = new StringTokenizer(url.substring(2), "/"); + + server = st.nextToken(); + database = st.hasMoreTokens() ? st.nextToken() : ""; + + int pos = server.indexOf(":"); + + if (pos != -1) + { + port = server.substring(pos+1); + server = server.substring(0, pos); + } + } + + txtServer .setText(server); + txtPort .setText(port); + txtDatabase.setText(database); + txtUser .setText(Lib.config.getDbmsUser()); + txtPass .setText(Lib.config.getDbmsPassword()); + } + + //--------------------------------------------------------------------------- + + public void save() throws Exception + { + String server = txtServer .getText(); + String port = txtPort .getText(); + String database= txtDatabase.getText(); + + if (database.equals("")) + throw new Exception(Messages.getString("databaseNotEmpty")); + + if (!server.equals("") && !port.equals("") && !Lib.type.isInteger(port)) + throw new Exception(Messages.getString("portInt")); + + + if (server.equals("")) server="localhost"; + if (port.equals("")) port="5432"; + + String url = PREFIX +"//"+ server +":"+ port +"/"+ database; + + Lib.config.setDbmsDriver ("org.postgresql.Driver"); + Lib.config.setDbmsURL (url); + Lib.config.setDbmsUser (txtUser.getText()); + Lib.config.setDbmsPassword(txtPass.getText()); + Lib.config.removeActivator(); + Lib.config.save(); + } + + //--------------------------------------------------------------------------- + //--- + //--- Variables + //--- + //--------------------------------------------------------------------------- + + private JTextField txtServer = new JTextField(15); + private JTextField txtPort = new JTextField(6); + private JTextField txtDatabase= new JTextField(12); + 
private JTextField txtUser = new JTextField(12); + private JTextField txtPass = new JTextField(12); + + //--------------------------------------------------------------------------- + + private static final String PREFIX = "jdbc:postgis:"; +} + +//============================================================================== + Index: src/org/fao/gast/lib/DatabaseLib.java =================================================================== --- src/org/fao/gast/lib/DatabaseLib.java (revision 5814) +++ src/org/fao/gast/lib/DatabaseLib.java (working copy) @@ -413,7 +413,12 @@ if (cb != null) cb.creating(getObjectName(sql), getObjectType(sql)); - dbms.execute(sql); + Lib.log.info("Executing "+sql); + if (sql.trim().startsWith("SELECT")) { + dbms.select(sql); + } else { + dbms.execute(sql); + } sb = new StringBuffer(); } } @@ -483,6 +488,10 @@ else if (url.indexOf("postgresql") != -1) file = "create-db-postgres.sql"; + else if (url.indexOf("postgis") != -1) + file = "create-db-postgis.sql"; + + //--- load the dbms schema return Lib.text.load(appPath +SETUP_DIR+ "/sql/"+ file); Index: docs/build.xml =================================================================== --- docs/build.xml (revision 5814) +++ docs/build.xml (working copy) @@ -37,12 +37,13 @@ - + @@ -58,12 +59,13 @@ - + @@ -80,11 +82,13 @@ + Index: installer/build.xml =================================================================== --- installer/build.xml (revision 5814) +++ installer/build.xml (working copy) @@ -138,7 +138,7 @@ - + Index: gast/setup/sql/create-db-postgis.sql =================================================================== --- gast/setup/sql/create-db-postgis.sql (revision 0) +++ gast/setup/sql/create-db-postgis.sql (revision 0) @@ -0,0 +1,308 @@ +-- ====================================================================== +-- === Sql Script for Database : Geonet +-- === +-- === Build : 153 +-- ====================================================================== + +CREATE TABLE Relations + 
( + id int, + relatedId int, + + primary key(id,relatedId) + ); + +-- ====================================================================== + +CREATE TABLE Categories + ( + id int, + name varchar(32) not null, + + primary key(id), + unique(name) + ); + +-- ====================================================================== + +CREATE TABLE Settings + ( + id int, + parentId int, + name varchar(32) not null, + value varchar(250), + + primary key(id), + + foreign key(parentId) references Settings(id) + ); + +-- ====================================================================== + +CREATE TABLE Languages + ( + id varchar(5), + name varchar(32) not null, + + primary key(id) + ); + +-- ====================================================================== + +CREATE TABLE Sources + ( + uuid varchar(250), + name varchar(250), + isLocal char(1) default 'y', + + primary key(uuid) + ); + +-- ====================================================================== + +CREATE TABLE IsoLanguages + ( + id int, + code varchar(3) not null, + + primary key(id), + unique(code) + ); + +-- ====================================================================== + +CREATE TABLE IsoLanguagesDes + ( + idDes int, + langId varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references IsoLanguages(id), + foreign key(langId) references Languages(id) + ); + +-- ====================================================================== + +CREATE TABLE Regions + ( + id int, + north float not null, + south float not null, + west float not null, + east float not null, + + primary key(id) + ); + +-- ====================================================================== + +CREATE TABLE RegionsDes + ( + idDes int, + langId varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references Regions(id), + foreign key(langId) references Languages(id) + ); + +-- 
====================================================================== + +CREATE TABLE Users + ( + id int, + username varchar(32) not null, + password varchar(40) not null, + surname varchar(32), + name varchar(32), + profile varchar(32) not null, + address varchar(128), + city varchar(128), + state varchar(32), + zip varchar(16), + country varchar(128), + email varchar(128), + organisation varchar(128), + kind varchar(16), + + primary key(id), + unique(username) + ); + +-- ====================================================================== + +CREATE TABLE Operations + ( + id int, + name varchar(32) not null, + reserved char(1) default 'n' not null, + + primary key(id) + ); + +-- ====================================================================== + +CREATE TABLE OperationsDes + ( + idDes int, + langId varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references Operations(id), + foreign key(langId) references Languages(id) + ); + +-- ====================================================================== + +CREATE TABLE Groups + ( + id int, + name varchar(32) not null, + description varchar(255), + email varchar(32), + referrer int, + + primary key(id), + unique(name), + + foreign key(referrer) references Users(id) + ); + +-- ====================================================================== + +CREATE TABLE GroupsDes + ( + idDes int, + langId varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references Groups(id), + foreign key(langId) references Languages(id) + ); + +-- ====================================================================== + +CREATE TABLE UserGroups + ( + userId int, + groupId int, + + primary key(userId,groupId), + + foreign key(userId) references Users(id), + foreign key(groupId) references Groups(id) + ); + +-- ====================================================================== + +CREATE TABLE CategoriesDes + ( + idDes int, + langId 
varchar(5), + label varchar(96) not null, + + primary key(idDes,langId), + + foreign key(idDes) references Categories(id), + foreign key(langId) references Languages(id) + ); + +-- ====================================================================== + +CREATE TABLE Metadata + ( + id int, + uuid varchar(250) not null, + schemaId varchar(32) not null, + isTemplate char(1) default 'n' not null, + isHarvested char(1) default 'n' not null, + createDate varchar(24) not null, + changeDate varchar(24) not null, + data text not null, + source varchar(250) not null, + title varchar(255), + root varchar(255), + harvestUuid varchar(250) default null, + owner int not null, + groupOwner int default null, + harvestUri varchar(255) default null, + rating int default 0 not null, + popularity int default 0 not null, + displayorder int, + + primary key(id), + unique(uuid), + + foreign key(owner) references Users(id), + foreign key(groupOwner) references Groups(id) + ); + +CREATE INDEX MetadataNDX1 ON Metadata(uuid); +CREATE INDEX MetadataNDX2 ON Metadata(source); + +-- ====================================================================== + +CREATE TABLE MetadataCateg + ( + metadataId int, + categoryId int, + + primary key(metadataId,categoryId), + + foreign key(metadataId) references Metadata(id), + foreign key(categoryId) references Categories(id) + ); + +-- ====================================================================== + +CREATE TABLE OperationAllowed + ( + groupId int, + metadataId int, + operationId int, + + primary key(groupId,metadataId,operationId), + + foreign key(groupId) references Groups(id), + foreign key(metadataId) references Metadata(id), + foreign key(operationId) references Operations(id) + ); + +-- ====================================================================== + +CREATE TABLE MetadataRating + ( + metadataId int, + ipAddress varchar(32), + rating int not null, + + primary key(metadataId,ipAddress), + + foreign key(metadataId) references 
Metadata(id) + ); + +-- ====================================================================== + +CREATE TABLE spatialIndex + ( + fid int, + id int, + + primary key(fid) + + ); + +CREATE INDEX spatialIndexNDX1 ON spatialIndex(id); +SELECT AddGeometryColumn('spatialindex', 'the_geom', 4326, 'MULTIPOLYGON', 2 ); +CREATE INDEX spatialIndexNDX2 on spatialIndex USING GIST(the_geom); + +-- ====================================================================== + Index: build.xml =================================================================== --- build.xml (revision 5814) +++ build.xml (working copy) @@ -31,7 +31,7 @@ - + Index: bin/start-geonetwork.sh =================================================================== --- bin/start-geonetwork.sh (revision 5814) +++ bin/start-geonetwork.sh (working copy) @@ -8,4 +8,4 @@ # try changing the Xmx parameter if your machine has little RAM #java -Xms48m -Xmx256m -Xss2M -XX:MaxPermSize=128m -DSTOP.PORT=8079 -Djava.awt.headless=true -DSTOP.KEY=geonetwork -jar start.jar ../bin/jetty.xml & -java -Xms48m -Xmx512m -Xss2M -XX:MaxPermSize=128m -DSTOP.PORT=8079 -Djava.awt.headless=true -DSTOP.KEY=geonetwork -jar start.jar ../bin/jetty.xml > logs/output.log 2>&1 & +java -server -Xms1g -Xmx2g -Xss2M -XX:MaxPermSize=128m -DSTOP.PORT=8079 -Djava.awt.headless=true -DSTOP.KEY=geonetwork -jar start.jar ../bin/jetty.xml > logs/output.log 2>&1 & Index: web/geonetwork/WEB-INF/config-portal.xml =================================================================== --- web/geonetwork/WEB-INF/config-portal.xml (revision 5814) +++ web/geonetwork/WEB-INF/config-portal.xml (working copy) @@ -9,6 +9,7 @@ + @@ -25,6 +26,7 @@ + @@ -42,6 +44,7 @@ + @@ -107,6 +110,7 @@ + Index: web/geonetwork/WEB-INF/config.xml =================================================================== --- web/geonetwork/WEB-INF/config.xml (revision 5814) +++ web/geonetwork/WEB-INF/config.xml (working copy) @@ -184,6 +184,7 @@ + @@ -195,6 +196,7 @@ + @@ -844,6 +846,7 @@ + Index: 
web/geonetwork/WEB-INF/lib/gt-postgis-2.5.8.jar =================================================================== Cannot display: file marked as a binary type. svn:mime-type = application/octet-stream Property changes on: web/geonetwork/WEB-INF/lib/gt-postgis-2.5.8.jar ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Index: web/geonetwork/WEB-INF/lib/commons-dbcp-1.2.2.jar =================================================================== Cannot display: file marked as a binary type. svn:mime-type = application/octet-stream Property changes on: web/geonetwork/WEB-INF/lib/commons-dbcp-1.2.2.jar ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Index: web/geonetwork/WEB-INF/lib/commons-pool-1.3.jar =================================================================== Cannot display: file marked as a binary type. svn:mime-type = application/octet-stream Property changes on: web/geonetwork/WEB-INF/lib/commons-pool-1.3.jar ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Index: web/geonetwork/WEB-INF/lib/gt-jdbc-2.5.8.jar =================================================================== Cannot display: file marked as a binary type. svn:mime-type = application/octet-stream Property changes on: web/geonetwork/WEB-INF/lib/gt-jdbc-2.5.8.jar ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Index: web/geonetwork/WEB-INF/log4j.cfg =================================================================== --- web/geonetwork/WEB-INF/log4j.cfg (revision 5814) +++ web/geonetwork/WEB-INF/log4j.cfg (working copy) @@ -16,6 +16,7 @@ log4j.logger.jeeves = DEBUG, jeeves log4j.logger.jeeves.dbms = WARN +log4j.logger.jeeves.dbmspool = WARN ### JEEVES APPENDER ############################################################