diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index c65fbd51148..0d596bc06df 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -33,9 +33,9 @@ jobs: if: ${{ github.event_name == 'pull_request' }} working-directory: docs/manual run: | - mike deploy --title "4.4 Latest" --alias-type=copy --update-aliases 4.4 latest + mike deploy --title "4.4" --alias-type=copy --update-aliases 4.4 latest - name: deploy latest docs to gh-pages branch if: ${{ github.event_name != 'pull_request' }} working-directory: docs/manual run: | - mike deploy --push --title "4.4 Latest" --alias-type=copy --update-aliases 4.4 latest + mike deploy --push --title "4.4" --alias-type=copy --update-aliases 4.4 latest diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index cff43db21d2..8d473643803 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -37,7 +37,7 @@ jobs: - name: Set up Maven uses: stCarolas/setup-maven@v5 with: - maven-version: 3.6.3 + maven-version: 3.8.3 - name: Build with Maven run: | mvn -B -ntp -V install -DskipTests=true -Dmaven.javadoc.skip=true -Drelease -Pwith-doc @@ -63,7 +63,7 @@ jobs: - name: Set up Maven uses: stCarolas/setup-maven@v5 with: - maven-version: 3.6.3 + maven-version: 3.8.3 - name: Test with maven run: | mvn -B resources:resources@copy-index-schema-to-source -f web diff --git a/.gitignore b/.gitignore index 5bb1036a3a4..cbd4fe33eaa 100644 --- a/.gitignore +++ b/.gitignore @@ -40,7 +40,12 @@ release/jetty/* schemas/*/doc/*/*.rst schematrons/.build target/ + +# build and release transifex/transifex-format/ +build/ +web-ui/LICENSE +web-ui/tx # web-app, clear using: mvn -f web/pom.xml clean:clean@reset diff --git a/README.md b/README.md index 39cea698a43..65f57590c6a 100644 --- a/README.md +++ b/README.md @@ -4,9 +4,6 @@ [![Build Status](https://github.com/geonetwork/core-geonetwork/actions/workflows/linux.yml/badge.svg?branch=main)](https://github.com/geonetwork/core-geonetwork/actions/workflows/linux.yml?query=branch%3Amain) -## Open Source Security Foundation (OpenSSF) best practices status -[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8626/badge)](https://www.bestpractices.dev/projects/8626) - ## Features * Immediate search access to local and distributed geospatial catalogues @@ -33,3 +30,6 @@ Developer documentation located in ``README.md`` files in the code-base: * General documentation for the project as a whole is in this [README.md](README.md) * [Software Development Documentation](/software_development/) provides instructions for setting up a development environment, building GeoNetwork, compiling user documentation, and making a releases. 
* Module specific documentation can be found in each module: + +## Open Source Security Foundation (OpenSSF) best practices status +[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8626/badge)](https://www.bestpractices.dev/projects/8626) diff --git a/cachingxslt/pom.xml b/cachingxslt/pom.xml index d654a3e9917..dec57782cb8 100644 --- a/cachingxslt/pom.xml +++ b/cachingxslt/pom.xml @@ -31,7 +31,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT diff --git a/common/pom.xml b/common/pom.xml index 0dbdf0ab22d..8a5ffa601ef 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -31,7 +31,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT diff --git a/common/src/main/java/org/fao/geonet/utils/Xml.java b/common/src/main/java/org/fao/geonet/utils/Xml.java index c796fbfe4ec..9e0b20ee6fd 100644 --- a/common/src/main/java/org/fao/geonet/utils/Xml.java +++ b/common/src/main/java/org/fao/geonet/utils/Xml.java @@ -39,14 +39,7 @@ import org.fao.geonet.utils.nio.NioPathAwareEntityResolver; import org.fao.geonet.utils.nio.NioPathHolder; import org.fao.geonet.utils.nio.PathStreamSource; -import org.jdom.Attribute; -import org.jdom.Content; -import org.jdom.DocType; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.Text; +import org.jdom.*; import org.jdom.filter.ElementFilter; import org.jdom.input.SAXBuilder; import org.jdom.output.Format; @@ -64,27 +57,14 @@ import javax.xml.XMLConstants; import javax.xml.bind.JAXBContext; import javax.xml.bind.Unmarshaller; -import javax.xml.transform.Result; -import javax.xml.transform.Source; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerException; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.URIResolver; +import javax.xml.transform.*; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.ValidatorHandler; -import java.io.BufferedOutputStream; -import java.io.ByteArrayInputStream; -import java.io.DataInputStream; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.PrintStream; -import java.io.StringReader; +import java.io.*; import java.net.HttpURLConnection; import java.net.URI; import java.net.URISyntaxException; @@ -99,14 +79,7 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; +import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -403,22 +376,16 @@ public static Element transform(Element xml, Path styleSheetPath, Map params, OutputStream out) throws Exception { StreamResult resStream = new StreamResult(out); - transform(xml, styleSheetPath, resStream, null); + transform(xml, styleSheetPath, resStream, params); out.flush(); } - - public static void transformXml(Element xml, Path styleSheetPath, OutputStream out) throws Exception { - StreamResult resStream = new StreamResult(out); - Map map = new HashMap<>(); - map.put("geonet-force-xml", "xml"); - transform(xml, styleSheetPath, resStream, map); - out.flush(); + public 
static void transform(Element xml, Path styleSheetPath, OutputStream out) throws Exception { + transform(xml, styleSheetPath, new HashMap<>(), out); } - //-------------------------------------------------------------------------- /** * Transforms an xml tree putting the result to a stream - no parameters. @@ -484,6 +451,9 @@ protected static Path resolvePath(Source s) throws URISyntaxException { /** * Transforms an xml tree putting the result to a stream with optional parameters. + *

+ * Add a geonet-force-xml parameter to force the formatting to be xml. + * The preferred method is to define it using xsl:output. */ public static void transform(Element xml, Path styleSheetPath, Result result, Map params) throws Exception { @@ -515,13 +485,13 @@ protected static Path resolvePath(Source s) throws URISyntaxException { t.setParameter(param.getKey(), param.getValue()); } - if (params.containsKey("geonet-force-xml")) { - ((Controller) t).setOutputProperty("indent", "yes"); - ((Controller) t).setOutputProperty("method", "xml"); - ((Controller) t).setOutputProperty("{http://saxon.sf.net/}indent-spaces", "3"); + if (params.containsKey("geonet-force-xml")) { + ((Controller) t).setOutputProperty("indent", "yes"); + ((Controller) t).setOutputProperty("method", "xml"); + ((Controller) t).setOutputProperty("{http://saxon.sf.net/}indent-spaces", "2"); + } } - } t.transform(srcXml, result); } } diff --git a/core/pom.xml b/core/pom.xml index f9a6f76215e..e75a312c7c7 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java b/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java index 14e09a62b12..4139d045ac5 100644 --- a/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/SchemaManager.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -1383,7 +1383,6 @@ private void checkAppSupported(Element schemaPluginCatRoot) throws Exception { " requires max Geonetwork version: " + majorAppVersionSupported + ", current is: " + version + ". Skip load schema."); removes.add(schemaInfo.getKey()); - continue; } } @@ -1901,7 +1900,7 @@ public boolean accept(Path entry) throws IOException { try (DirectoryStream schemaplugins = Files.newDirectoryStream(schemaPluginDir, xsdFilter)) { boolean missingXsdFiles = true; for (Path schemaplugin : schemaplugins) { - IO.copyDirectoryOrFile(schemaplugin, webAppDirSchemaXSD.resolve(schemaplugin), false); + IO.copyDirectoryOrFile(schemaplugin, webAppDirSchemaXSD.resolve(schemaplugin.getFileName()), false); missingXsdFiles = false; } diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java index 96b0aa34ee1..16cfc9719ae 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/IMetadataStatus.java @@ -69,11 +69,14 @@ public interface IMetadataStatus { MetadataStatus setStatusExt(ServiceContext context, int id, int status, ISODate changeDate, String changeMessage) throws Exception; /** - * Set status of metadata id and do not reindex metadata id afterwards. + * Set status of metadata id and reindex metadata id afterwards based on updateIndex flag + * + * @param status metadata status to set + * @param updateIndex index update flag * * @return the saved status entity object */ - MetadataStatus setStatusExt(MetadataStatus status) throws Exception; + MetadataStatus setStatusExt(MetadataStatus status, boolean updateIndex) throws Exception; /** * Set status of metadata id and reindex metadata id afterwards. 
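Editor note on the Xml.java change above: the removed `transformXml(...)` helper is replaced by honouring the params map in `transform(xml, styleSheetPath, params, out)` (which previously discarded it). A minimal caller-side sketch, assuming the parameter map type is `Map<String, Object>`; only the `transform` overload and the `geonet-force-xml` key are taken from this patch, the wrapper class below is hypothetical.

```java
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;

import org.fao.geonet.utils.Xml;
import org.jdom.Element;

// Illustrative caller only: replaces the removed transformXml(...) helper.
public class ForceXmlOutputExample {

    public static void writeAsIndentedXml(Element record, Path styleSheetPath, OutputStream out) throws Exception {
        Map<String, Object> params = new HashMap<>();
        // Forces XML output with indentation (this patch sets Saxon indent-spaces to 2).
        // Declaring the output via xsl:output in the stylesheet remains the preferred approach.
        params.put("geonet-force-xml", "xml");
        Xml.transform(record, styleSheetPath, params, out);
    }
}
```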
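Editor note on the `IMetadataStatus.setStatusExt` change above: the new `updateIndex` flag lets callers (for example `DefaultStatusActions.onStatusChange` further down) save several status rows without triggering a reindex per record. A hedged sketch of such a caller follows; the `BatchStatusUpdate` class and its wiring are hypothetical, while the `setStatusExt(status, updateIndex)` and `indexMetadata(...)` calls mirror this patch and `BaseMetadataStatus`.

```java
import java.util.List;

import org.fao.geonet.domain.MetadataStatus;
import org.fao.geonet.kernel.datamanager.IMetadataIndexer;
import org.fao.geonet.kernel.datamanager.IMetadataStatus;
import org.fao.geonet.kernel.search.IndexingMode; // package assumed; enum matches the usage in BaseMetadataStatus

/** Hypothetical caller: saves a batch of status changes, then reindexes once at the end. */
public class BatchStatusUpdate {

    private final IMetadataStatus metadataStatusManager;
    private final IMetadataIndexer metadataIndexer;

    public BatchStatusUpdate(IMetadataStatus metadataStatusManager, IMetadataIndexer metadataIndexer) {
        this.metadataStatusManager = metadataStatusManager;
        this.metadataIndexer = metadataIndexer;
    }

    public void apply(List<MetadataStatus> changes) throws Exception {
        // Save each status without triggering a per-record reindex (updateIndex = false).
        for (MetadataStatus status : changes) {
            metadataStatusManager.setStatusExt(status, false);
        }
        // Reindex once per record at the end of the batch, mirroring what
        // BaseMetadataStatus does when updateIndex is true.
        for (MetadataStatus status : changes) {
            metadataIndexer.indexMetadata(String.valueOf(status.getMetadataId()), true, IndexingMode.full);
        }
    }
}
```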
diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java index 150bd65a81c..cf5f7fc972d 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/base/BaseMetadataStatus.java @@ -160,9 +160,11 @@ public MetadataStatus setStatus(ServiceContext context, int id, int status, ISOD } @Override - public MetadataStatus setStatusExt(MetadataStatus metatatStatus) throws Exception { + public MetadataStatus setStatusExt(MetadataStatus metatatStatus, boolean updateIndex) throws Exception { metadataStatusRepository.save(metatatStatus); - metadataIndexer.indexMetadata(metatatStatus.getMetadataId() + "", true, IndexingMode.full); + if (updateIndex) { + metadataIndexer.indexMetadata(metatatStatus.getMetadataId() + "", true, IndexingMode.full); + } return metatatStatus; } diff --git a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java index 040acbf4aca..c5cc81ad1da 100644 --- a/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java +++ b/core/src/main/java/org/fao/geonet/kernel/datamanager/draft/DraftMetadataUtils.java @@ -589,7 +589,7 @@ protected String createDraft(ServiceContext context, String templateId, String g List listOfStatusChange = new ArrayList<>(1); listOfStatusChange.add(metadataStatus); - sa.onStatusChange(listOfStatusChange); + sa.onStatusChange(listOfStatusChange, true); } } diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java index e8678d483ab..983b9e44d94 100644 --- a/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java +++ b/core/src/main/java/org/fao/geonet/kernel/metadata/DefaultStatusActions.java @@ -136,7 +136,7 @@ public void onEdit(int id, boolean minorEdit) throws Exception { * @return * @throws Exception */ - public Map onStatusChange(List listOfStatus) throws Exception { + public Map onStatusChange(List listOfStatus, boolean updateIndex) throws Exception { if (listOfStatus.stream().map(MetadataStatus::getMetadataId).distinct().count() != listOfStatus.size()) { throw new IllegalArgumentException("Multiple status update received on the same metadata"); @@ -179,16 +179,7 @@ public Map onStatusChange(List listOf context.debug("Change status of metadata with id " + status.getMetadataId() + " from " + currentStatusId + " to " + statusId); // we know we are allowed to do the change, apply any side effects - boolean deleted = applyStatusChange(status.getMetadataId(), status, statusId); - - // inform content reviewers if the status is submitted - try { - notify(getUserToNotify(status), status); - } catch (Exception e) { - context.warning(String.format( - "Failed to send notification on status change for metadata %s with status %s. 
Error is: %s", - status.getMetadataId(), status.getStatusValue().getId(), e.getMessage())); - } + boolean deleted = applyStatusChange(status.getMetadataId(), status, statusId, updateIndex); if (deleted) { results.put(status.getMetadataId(), StatusChangeType.DELETED); @@ -208,6 +199,15 @@ public Map onStatusChange(List listOf } } + // inform content reviewers if the status is submitted + try { + notify(getUserToNotify(status), status); + } catch (Exception e) { + context.warning(String.format( + "Failed to send notification on status change for metadata %s with status %s. Error is: %s", + status.getMetadataId(), status.getStatusValue().getId(), e.getMessage())); + } + } return results; @@ -218,10 +218,10 @@ public Map onStatusChange(List listOf * eg. if APPROVED, publish a record, * if RETIRED, unpublish or delete the record. */ - private boolean applyStatusChange(int metadataId, MetadataStatus status, String toStatusId) throws Exception { + private boolean applyStatusChange(int metadataId, MetadataStatus status, String toStatusId, boolean updateIndex) throws Exception { boolean deleted = false; if (!deleted) { - metadataStatusManager.setStatusExt(status); + metadataStatusManager.setStatusExt(status, updateIndex); } return deleted; } diff --git a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java index 9a4aecff585..047c0b1b33a 100644 --- a/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java +++ b/core/src/main/java/org/fao/geonet/kernel/metadata/StatusActions.java @@ -38,6 +38,6 @@ public interface StatusActions { public void onEdit(int id, boolean minorEdit) throws Exception; - public Map onStatusChange(List status) throws Exception; + public Map onStatusChange(List status, boolean updateIndex) throws Exception; } diff --git a/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java b/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java index e06b42e724b..bfd783bc5f4 100644 --- a/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java +++ b/core/src/main/java/org/fao/geonet/kernel/search/EsSearchManager.java @@ -552,6 +552,7 @@ private void checkIndexResponse(BulkResponse bulkItemResponses, static { arrayFields = ImmutableSet.builder() .add(Geonet.IndexFieldNames.RECORDLINK) + .add("geom") .add("topic") .add("cat") .add("keyword") @@ -664,14 +665,7 @@ public ObjectNode documentToJson(Element xml) { || propertyName.endsWith("DateForResource") || propertyName.startsWith("cl_"); - if (name.equals("geom")) { - try { - doc.set("geom", mapper.readTree(nodeElements.get(0).getTextNormalize())); - } catch (IOException e) { - LOGGER.error("Parsing invalid geometry for JSON node {}. 
Error is: {}", - nodeElements.get(0).getTextNormalize(), e.getMessage()); - } - } else if (isArray) { + if (isArray) { ArrayNode arrayNode = doc.putArray(propertyName); for (Element node : nodeElements) { if (isObject) { @@ -817,8 +811,8 @@ public SearchResponse query(JsonNode jsonRequest, Set includedFields, return client.query(defaultIndex, jsonRequest, null, includedFields, from, size); } - public Map getFieldsValues(String id, Set fields) throws IOException { - return client.getFieldsValues(defaultIndex, id, fields); + public Map getFieldsValues(String id, Set fields, String language) throws Exception { + return client.getFieldsValues(defaultIndex, id, fields, language); } diff --git a/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/GeonetworkJwtAuthenticationProvider.java b/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/GeonetworkJwtAuthenticationProvider.java index 680a540ff92..220f024097c 100644 --- a/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/GeonetworkJwtAuthenticationProvider.java +++ b/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/GeonetworkJwtAuthenticationProvider.java @@ -52,7 +52,7 @@ import org.springframework.security.oauth2.core.user.DefaultOAuth2User; import org.springframework.security.oauth2.core.user.OAuth2User; import org.springframework.security.oauth2.jwt.Jwt; -import org.springframework.security.oauth2.server.resource.BearerTokenAuthenticationToken; +import org.springframework.security.oauth2.server.resource.authentication.BearerTokenAuthenticationToken; import org.springframework.security.oauth2.server.resource.BearerTokenError; import org.springframework.security.oauth2.server.resource.BearerTokenErrorCodes; import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationConverter; diff --git a/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCache.java b/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCache.java index 4e75a1282b2..15252009821 100644 --- a/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCache.java +++ b/core/src/main/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCache.java @@ -10,19 +10,13 @@ */ public class UserInfoCache { - static Object lockobj = new Object(); + static final Object lockobj = new Object(); Map cache = new HashMap<>(); public UserInfoCacheItem getItem(String accessKey) { synchronized (lockobj) { - if (!cache.containsKey(accessKey)) - return null; - UserInfoCacheItem item = cache.get(accessKey); - if (item.isExpired()) { - cache.remove(accessKey); - return null; - } - return item; + cache.entrySet().removeIf(e -> e.getValue().isExpired()); + return cache.get(accessKey); } } diff --git a/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java b/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java index 9b6066e97b4..d4482cdcd7a 100644 --- a/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java +++ b/core/src/main/java/org/fao/geonet/kernel/setting/Settings.java @@ -71,7 +71,6 @@ public class Settings { public static final String SYSTEM_ENABLE_ALL_THESAURUS = "system/metadata/allThesaurus"; public static final String SYSTEM_METADATA_THESAURUS_NAMESPACE = "system/metadata/thesaurusNamespace"; public static final String SYSTEM_METADATA_VALIDATION_REMOVESCHEMALOCATION = "system/metadata/validation/removeSchemaLocation"; - public static final String 
SYSTEM_METADATA_HISTORY_ENABLED = "system/metadata/history/enabled"; public static final GNSetting SYSTEM_SITE_SVNUUID = new GNSetting("system/site/svnUuid", true); public static final String SYSTEM_INTRANET_NETWORK = "system/intranet/network"; public static final String SYSTEM_INTRANET_NETMASK = "system/intranet/netmask"; @@ -139,6 +138,8 @@ public class Settings { public static final String METADATA_IMPORT_RESTRICT = "metadata/import/restrict"; public static final String METADATA_IMPORT_USERPROFILE = "metadata/import/userprofile"; public static final String METADATA_BATCH_EDITING_ACCESS_LEVEL = "metadata/batchediting/accesslevel"; + public static final String METADATA_HISTORY_ENABLED = "metadata/history/enabled"; + public static final String METADATA_HISTORY_ACCESS_LEVEL = "metadata/history/accesslevel"; public static final String METADATA_PUBLISHED_DELETE_USERPROFILE = "metadata/delete/profilePublishedMetadata"; public static final String METADATA_PUBLISH_USERPROFILE = "metadata/publication/profilePublishMetadata"; public static final String METADATA_UNPUBLISH_USERPROFILE = "metadata/publication/profileUnpublishMetadata"; diff --git a/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java b/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java index 5686f54e12a..e904e8114a8 100644 --- a/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java +++ b/core/src/main/java/org/fao/geonet/kernel/url/UrlAnalyzer.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2019 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -39,10 +39,6 @@ import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; import java.util.Optional; @@ -78,21 +74,19 @@ public void processMetadata(Element element, AbstractMetadata md) throws org.jdo if (schemaPlugin instanceof LinkAwareSchemaPlugin) { metadataLinkRepository - .findAll(metadatalinksTargetting(md)) - .stream() - .forEach(metadatalink -> { - metadatalink.getLink().getRecords().remove(metadatalink); - }); + .findAll(metadatalinksTargetting(md)) + .stream() + .forEach(metadatalink -> metadatalink.getLink().getRecords().remove(metadatalink)); entityManager.flush(); ((LinkAwareSchemaPlugin) schemaPlugin).createLinkStreamer(new ILinkBuilder() { @Override public Link found(String url) { - Link link = linkRepository.findOneByUrl(url); - if (link != null) { - return link; + Optional linkOptional = linkRepository.findOneByUrl(url); + if (linkOptional.isPresent()) { + return linkOptional.get(); } else { - link = new Link(); + Link link = new Link(); link.setUrl(url); linkRepository.save(link); return link; @@ -102,7 +96,7 @@ public Link found(String url) { @Override public void persist(Link link, AbstractMetadata metadata) { MetadataLink metadataLink = new MetadataLink(); - metadataLink.setMetadataId(new Integer(metadata.getId())); + metadataLink.setMetadataId(metadata.getId()); metadataLink.setMetadataUuid(metadata.getUuid()); metadataLink.setLink(link); link.getRecords().add(metadataLink); @@ -115,10 +109,10 @@ public void persist(Link link, AbstractMetadata metadata) { 
public void purgeMetataLink(Link link) { metadataLinkRepository - .findAll(metadatalinksTargetting(link)) - .stream() - .filter(metadatalink -> isReferencingAnUnknownMetadata((MetadataLink)metadatalink)) - .forEach(metadataLinkRepository::delete); + .findAll(metadatalinksTargetting(link)) + .stream() + .filter(this::isReferencingAnUnknownMetadata) + .forEach(metadataLinkRepository::delete); entityManager.flush(); } @@ -136,28 +130,16 @@ public void testLink(Link link) { } private Specification metadatalinksTargetting(Link link) { - return new Specification() { - @Override - public Predicate toPredicate(Root root, CriteriaQuery criteriaQuery, CriteriaBuilder criteriaBuilder) { - return criteriaBuilder.equal(root.get(MetadataLink_.link).get(Link_.id), link.getId()); - } - }; + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(MetadataLink_.link).get(Link_.id), link.getId()); } private Specification metadatalinksTargetting(AbstractMetadata md) { - return new Specification() { - @Override - public Predicate toPredicate(Root root, CriteriaQuery criteriaQuery, CriteriaBuilder criteriaBuilder) { - return criteriaBuilder.equal(root.get(MetadataLink_.metadataId), md.getId()); - } - }; + return (root, criteriaQuery, criteriaBuilder) -> criteriaBuilder.equal(root.get(MetadataLink_.metadataId), md.getId()); } private boolean isReferencingAnUnknownMetadata(MetadataLink metadatalink) { Optional metadata = metadataRepository.findById(metadatalink.getMetadataId()); - return !metadata.isPresent(); + return metadata.isEmpty(); } - - } diff --git a/core/src/main/java/org/fao/geonet/util/MailUtil.java b/core/src/main/java/org/fao/geonet/util/MailUtil.java index fc0c743c6fe..517a292b99f 100644 --- a/core/src/main/java/org/fao/geonet/util/MailUtil.java +++ b/core/src/main/java/org/fao/geonet/util/MailUtil.java @@ -364,9 +364,6 @@ private static void configureBasics(String hostName, Integer smtpPort, email.setAuthenticator(new DefaultAuthenticator(username, password)); } - - email.setDebug(true); - if (tls != null && tls) { email.setStartTLSEnabled(tls); email.setStartTLSRequired(tls); diff --git a/core/src/main/java/org/fao/geonet/util/XslUtil.java b/core/src/main/java/org/fao/geonet/util/XslUtil.java index 256129da48b..4329d479afd 100644 --- a/core/src/main/java/org/fao/geonet/util/XslUtil.java +++ b/core/src/main/java/org/fao/geonet/util/XslUtil.java @@ -631,13 +631,19 @@ public static MetadataResourceContainer getResourceContainerDescription(String m Store store = BeanFactoryAnnotationUtils.qualifiedBeanOfType(ApplicationContextHolder.get().getBeanFactory(), Store.class, "filesystemStore"); if (store != null) { - if (store.getResourceManagementExternalProperties() != null && store.getResourceManagementExternalProperties().isFolderEnabled()) { - ServiceContext context = ServiceContext.get(); - return store.getResourceContainerDescription(ServiceContext.get(), metadataUuid, approved); - } else { - // Return an empty object which should not be used because the folder is not enabled. - return new FilesystemStoreResourceContainer(metadataUuid, -1, null, null, null, approved); + try { + if (store.getResourceManagementExternalProperties() != null && store.getResourceManagementExternalProperties().isFolderEnabled()) { + ServiceContext context = ServiceContext.get(); + return store.getResourceContainerDescription(ServiceContext.get(), metadataUuid, approved); + } else { + // Return an empty object which should not be used because the folder is not enabled. 
+ return new FilesystemStoreResourceContainer(metadataUuid, -1, null, null, null, approved); + } + } catch (RuntimeException e) { + Log.error(Geonet.RESOURCES, "Could not locate resource in getResourceContainerDescription due to runtime exception", e); + return null; } + } Log.error(Geonet.RESOURCES, "Could not locate a Store bean in getResourceContainerDescription"); return null; @@ -843,11 +849,9 @@ public static String getIndexField(Object appName, Object uuid, Object field, Ob try { Set fields = new HashSet<>(); fields.add(fieldname); - // TODO: Multilingual fields - final Map values = searchManager.getFieldsValues(id, fields); + final Map values = searchManager.getFieldsValues(id, fields, language); return values.get(fieldname); } catch (Exception e) { - e.printStackTrace(); Log.error(Geonet.GEONETWORK, "Failed to get index field '" + fieldname + "' value on '" + id + "', caused by " + e.getMessage()); } return ""; diff --git a/core/src/test/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCacheTest.java b/core/src/test/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCacheTest.java new file mode 100644 index 00000000000..475171a5ae2 --- /dev/null +++ b/core/src/test/java/org/fao/geonet/kernel/security/openidconnect/bearer/UserInfoCacheTest.java @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2022 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ +package org.fao.geonet.kernel.security.openidconnect.bearer; + +import com.google.common.collect.Lists; +import junit.framework.TestCase; +import org.springframework.security.oauth2.core.user.DefaultOAuth2User; +import org.springframework.security.oauth2.core.user.OAuth2User; + +import java.time.Instant; +import java.util.Collections; + +public class UserInfoCacheTest extends TestCase { + + private OAuth2User user1 = new DefaultOAuth2User(Lists.newArrayList(), Collections.singletonMap("name", "frank"), "name"); + private OAuth2User user2 = new DefaultOAuth2User(Lists.newArrayList(), Collections.singletonMap("name", "jeff"), "name"); + + public void testCache() { + UserInfoCacheItem item1 = new UserInfoCacheItem("a", Instant.now().plusSeconds(1000), user1, Lists.newArrayList()); + UserInfoCacheItem item2 = new UserInfoCacheItem("b", Instant.now().plusSeconds(1000), user2, Lists.newArrayList()); + UserInfoCacheItem item3 = new UserInfoCacheItem("c", Instant.now().minusSeconds(1000), user2, Lists.newArrayList()); + + UserInfoCache cache = new UserInfoCache(); + cache.putItem(item1); + cache.putItem(item2); + cache.putItem(item3); + + assertEquals(3, cache.cache.size()); + assertEquals(item1, cache.getItem("a")); + assertEquals(item2, cache.getItem("b")); + assertNull(cache.getItem("c")); + assertEquals(2, cache.cache.size()); + } +} diff --git a/core/src/test/resources/org/fao/geonet/api/Messages.properties b/core/src/test/resources/org/fao/geonet/api/Messages.properties index 7146a6b8f93..dfa1637b7d5 100644 --- a/core/src/test/resources/org/fao/geonet/api/Messages.properties +++ b/core/src/test/resources/org/fao/geonet/api/Messages.properties @@ -54,8 +54,8 @@ user_password_changed='%s' password was updated. user_password_notchanged=A problem occurred trying to change '%s' password. Contact the helpdesk. user_password_invalid_changekey='%s' is an invalid change key for '%s'. Change keys are only valid for one day. user_registered=User '%s' registered. -user_with_that_email_found=A user with this email '%s' already exists. -user_with_that_username_found=A user with this username '%s' already exists. +user_with_that_email_found=A user with this email or username already exists. +user_with_that_username_found=A user with this email or username already exists. register_email_admin_subject=%s / New account for %s as %s register_email_admin_message=Dear Admin,\n\ Newly registered user %s has requested %s access for %s.\n\ @@ -118,6 +118,12 @@ Message: \n\ View record: \n\ {{link}} # TODO: Link to DOI creation panel +metadata_published_subject=%s / Metadata publication +metadata_published_text=The following records have been processed:\n\ +

+metadata_published_record_text=
  • The metadata {{index:resourceTitleObject}} has been published.
  • +metadata_unpublished_record_text=
  • The metadata {{index:resourceTitleObject}} has been unpublished.
  • +metadata_approved_published_record_text=
  • The metadata {{index:resourceTitleObject}} has been published as a new version.
  • api.groups.group_not_found=Group with ID ''{0}'' not found in this catalog. user_watchlist_subject=%s / %d updates in your watch list since %s @@ -171,9 +177,19 @@ exception.doi.missingSavedquery.description="Record ''{0}'' is in schema ''{1}'' exception.doi.recordNotConformantMissingInfo=Record is not conform with DataCite format exception.doi.recordNotConformantMissingInfo.description=Record ''{0}'' is not conform with DataCite format. {1} mandatory field(s) missing. {2} exception.doi.recordNotConformantMissingMandatory=Record is not conform with DataCite validation rules for mandatory fields -exception.doi.recordNotConformantMissingMandatory.description=Record ''{0}'' is not conform with DataCite validation rules for mandatory fields. Error is: {1}. Required fields in DataCite are: identifier, creators, titles, publisher, publicationYear, resourceType. Check the DataCite format output and adapt the record content to add missing information. +exception.doi.recordNotConformantMissingMandatory.description=Record ''{0}'' is not conform with DataCite validation rules for mandatory fields. Error is: {1}. Required fields in DataCite are: identifier, creators, titles, publisher, publicationYear, resourceType. Check the DataCite format output and adapt the record content to add missing information. exception.doi.recordInvalid=Record converted to DataCite format is invalid. -exception.doi.recordInvalid.description=Record ''{0}'' converted to DataCite format is invalid. Error is: {1}. Required fields in DataCite are: identifier, creators, titles, publisher, publicationYear, resourceType. Check the DataCite format output and adapt the record content to add missing information. +exception.doi.recordInvalid.description=Record ''{0}'' converted to DataCite format is invalid. Error is: {1}. Required fields in DataCite are: identifier, creators, titles, publisher, publicationYear, resourceType. Check the DataCite format output and adapt the record content to add missing information. +exception.doi.serverErrorCreate=Error creating DOI +exception.doi.serverErrorCreate.description=Error creating DOI: {0} +exception.doi.serverErrorRetrieve=Error retrieving DOI +exception.doi.serverErrorRetrieve.description=Error retrieving DOI: {0} +exception.doi.serverErrorDelete=Error deleting DOI +exception.doi.serverErrorDelete.description=Error deleting DOI: {0} +exception.doi.serverErrorUnregister=Error unregistering DOI +exception.doi.serverErrorUnregister.description=Error unregistering DOI: {0} +exception.doi.notSupportedOperationError=Operation not supported +exception.doi.notSupportedOperationError.description={0} api.metadata.import.importedWithId=Metadata imported with ID '%s' api.metadata.import.importedWithUuid=Metadata imported with UUID '%s' api.metadata.import.importedFromXMLWithUuid=Metadata imported from XML with UUID '%s' diff --git a/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties b/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties index d4dfb7add40..8a248ea9661 100644 --- a/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties +++ b/core/src/test/resources/org/fao/geonet/api/Messages_fre.properties @@ -45,8 +45,8 @@ user_password_sent=Si l''utilisateur existe, vous recevrez un courriel contenant user_password_changed=Le mot de passe de %s a \u00E9t\u00E9 mis \u00E0 jour. user_password_notchanged=\u00C9chec lors du changement de mot de passe de %s. Contactez le support. user_password_invalid_changekey=%s est une cl\u00E9 invalide pour %s. 
Les cl\u00E9s ne sont valides que pendant une journ\u00E9e. -user_with_that_email_found=Un utilisateur avec cette adresse email %s existe d\u00E9j\u00E0. -user_with_that_username_found=Un utilisateur avec ce nom d''utilisateur %s existe d\u00E9j\u00E0. +user_with_that_email_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0. +user_with_that_username_found=Un utilisateur avec cette adresse email ou ce nom d''utilisateur existe d\u00E9j\u00E0. register_email_admin_subject=%s / Cr\u00E9ation de compte pour %s en tant que %s register_email_admin_message=Cher administrateur,\n\ L'utilisateur %s vient de demander une cr\u00E9ation de compte pour %s.\n\ @@ -112,6 +112,7 @@ metadata_published_text=Les fiches suivantes ont \u00E9t\u00E9 trait\u00E9es:\n\ metadata_published_record_text=
  • La m\u00E9tadonn\u00E9e {{index:resourceTitleObject}} a \u00E9t\u00E9 publi\u00E9e.
  • metadata_unpublished_record_text=
  • La m\u00E9tadonn\u00E9e {{index:resourceTitleObject}} a \u00E9t\u00E9 d\u00E9publi\u00E9e.
  • +metadata_approved_published_record_text=
  • Une nouvelle version de la m\u00E9tadonn\u00E9e {{index:resourceTitleObject}} a \u00E9t\u00E9 publi\u00E9e.
  • api.groups.group_not_found=Le groupe avec l''identifiant ''{0}'' n''a pas \u00E9t\u00E9 trouv\u00E9 dans le catalogue. user_watchlist_subject=%s / %d mises \u00e0 jour dans vos fiches surveill\u00E9es %s @@ -166,6 +167,16 @@ exception.doi.recordNotConformantMissingMandatory=La fiche n''est pas conforme a exception.doi.recordNotConformantMissingMandatory.description=La fiche ''{0}'' n''est pas conforme aux r\u00E8gles de validation DataCite pour les champs obligatoires. L''erreur est: {1}. Les champs obligatoires dans DataCite sont : identifiant, cr\u00E9ateurs, titres, \u00E9diteur, publicationYear, resourceType. V\u00E9rifiez la sortie au format DataCite et adaptez le contenu de la fiche pour ajouter les informations manquantes. exception.doi.recordInvalid=Le fiche converti n''est pas conforme au format DataCite exception.doi.recordInvalid.description=Le fiche ''{0}'' converti n''est pas conforme au format DataCite. L''erreur est: {1}. Les champs obligatoires dans DataCite sont : identifiant, cr\u00E9ateurs, titres, \u00E9diteur, ann\u00E9e de publication, type de ressource. V\u00E9rifier la sortie au format DataCite et adapter le contenu de la fiche pour ajouter les informations manquantes. +exception.doi.serverErrorCreate=Erreur lors de la cr\u00E9ation du DOI +exception.doi.serverErrorCreate.description=Erreur lors de la cr\u00E9ation du DOI : {0} +exception.doi.serverErrorRetrieve=Erreur lors de la r\u00E9cup\u00E9ration du DOI +exception.doi.serverErrorRetrieve.description=Erreur lors de la r\u00E9cup\u00E9ration du DOI : {0} +exception.doi.serverErrorDelete=Erreur lors de la suppression du DOI +exception.doi.serverErrorDelete.description=Erreur lors de la suppression du DOI : {0} +exception.doi.serverErrorUnregister=Erreur lors de la d\u00E9sinscription du DOI +exception.doi.serverErrorUnregister.description=Erreur lors de la d\u00E9sinscription du DOI {0} +exception.doi.notSupportedOperationError=Op\u00E9ration non prise en charge +exception.doi.notSupportedOperationError.description={0} api.metadata.import.importedWithId=Fiche import\u00E9e avec l'ID '%s' api.metadata.import.importedWithUuid=Fiche import\u00E9e avec l'UUID '%s' api.metadata.import.importedFromXMLWithUuid=Fiche import\u00E9e depuis le fichier XML avec l'UUID '%s' diff --git a/csw-server/pom.xml b/csw-server/pom.xml index 5ba962cc2a5..d3ebba1d4f1 100644 --- a/csw-server/pom.xml +++ b/csw-server/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/csw-server/src/main/java/org/fao/geonet/component/csw/GetRecordById.java b/csw-server/src/main/java/org/fao/geonet/component/csw/GetRecordById.java index 43d7f725f7b..078fd59cbce 100644 --- a/csw-server/src/main/java/org/fao/geonet/component/csw/GetRecordById.java +++ b/csw-server/src/main/java/org/fao/geonet/component/csw/GetRecordById.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -30,14 +30,9 @@ import jeeves.server.context.ServiceContext; -import org.apache.commons.lang.NotImplementedException; import org.fao.geonet.kernel.SchemaManager; -import org.fao.geonet.kernel.setting.SettingInfo; -import org.fao.geonet.utils.Log; import org.fao.geonet.Util; -import 
org.fao.geonet.utils.Xml; -import org.apache.commons.lang.StringUtils; import org.fao.geonet.GeonetContext; import org.fao.geonet.constants.Geonet; import org.fao.geonet.csw.common.Csw; @@ -54,7 +49,6 @@ import org.fao.geonet.kernel.csw.CatalogService; import org.fao.geonet.kernel.csw.services.AbstractOperation; import org.fao.geonet.kernel.csw.services.getrecords.SearchController; -import org.fao.geonet.domain.Pair; import org.fao.geonet.lib.Lib; import org.jdom.Element; import org.springframework.beans.factory.annotation.Autowired; @@ -76,12 +70,14 @@ public class GetRecordById extends AbstractOperation implements CatalogService { //--------------------------------------------------------------------------- static final String NAME = "GetRecordById"; - private SearchController _searchController; + + @Autowired + private SearchController searchController; @Autowired - private CatalogConfiguration _catalogConfig; + private CatalogConfiguration catalogConfig; @Autowired - private SchemaManager _schemaManager; + private SchemaManager schemaManager; @Autowired public GetRecordById(ApplicationContext applicationContext) { @@ -106,7 +102,7 @@ public Element execute(Element request, ServiceContext context) throws CatalogEx checkVersion(request); //-- Added for CSW 2.0.2 compliance by warnock@awcubed.com checkOutputFormat(request); - String outSchema = OutputSchema.parse(request.getAttributeValue("outputSchema"), _schemaManager); + String outSchema = OutputSchema.parse(request.getAttributeValue("outputSchema"), schemaManager); //-------------------------------------------------------- ElementSetName setName = getElementSetName(request, ElementSetName.SUMMARY); @@ -135,8 +131,8 @@ public Element execute(Element request, ServiceContext context) throws CatalogEx Lib.resource.checkPrivilege(context, id, ReservedOperation.view); final String displayLanguage = context.getLanguage(); - Element md = SearchController.retrieveMetadata(context, id, setName, outSchema, null, null, ResultType.RESULTS, null, - displayLanguage); + Element md = searchController.retrieveMetadata(context, id, setName, outSchema, null, null, ResultType.RESULTS,null, + displayLanguage, true); if (md != null) { final Map transformers = context.getApplicationContext() @@ -150,7 +146,7 @@ public Element execute(Element request, ServiceContext context) throws CatalogEx response.addContent(md); - if (_catalogConfig.isIncreasePopularity()) { + if (catalogConfig.isIncreasePopularity()) { gc.getBean(DataManager.class).increasePopularity(context, id); } } diff --git a/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java b/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java index f66e5e8e91d..564280b4ce2 100644 --- a/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java +++ b/csw-server/src/main/java/org/fao/geonet/kernel/csw/services/getrecords/SearchController.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -26,6 +26,7 @@ import co.elastic.clients.elasticsearch._types.SortOptions; import co.elastic.clients.elasticsearch.core.SearchResponse; import co.elastic.clients.elasticsearch.core.search.Hit; 
+import co.elastic.clients.elasticsearch.core.search.TotalHits; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import jeeves.server.context.ServiceContext; @@ -56,18 +57,11 @@ import org.jdom.Content; import org.jdom.Element; import org.jdom.Namespace; -import org.geotools.api.filter.Filter; -import org.geotools.api.filter.capability.FilterCapabilities; import org.springframework.beans.factory.annotation.Autowired; import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; public class SearchController { @@ -97,21 +91,58 @@ public class SearchController { * Retrieves metadata from the database. Conversion between metadata record and output schema * are defined in xml/csw/schemas/ directory. * - * @param context service context - * @param id id of metadata - * @param setName requested ElementSetName - * @param outSchema requested OutputSchema - * @param elemNames requested ElementNames - * @param typeName requested typeName - * @param resultType requested ResultType - * @param strategy ElementNames strategy - * @throws CatalogException hmm + * @param context service context + * @param id id of metadata + * @param setName requested ElementSetName + * @param outSchema requested OutputSchema + * @param elemNames requested ElementNames + * @param typeName requested typeName + * @param resultType requested ResultType + * @param strategy ElementNames strategy + * @param checkMetadataAvailableInPortal Checks if the metadata can be retrieved in the portal. + * Used in GetRecordById. GetRecords does a query with this check already. * @return The XML metadata record if the record could be converted to the required output * schema. Null if no conversion available for the schema (eg. fgdc record can not be converted * to ISO). 
+ * @throws CatalogException hmm */ - public static Element retrieveMetadata(ServiceContext context, String id, ElementSetName setName, String - outSchema, Set elemNames, String typeName, ResultType resultType, String strategy, String displayLanguage) throws CatalogException { + public Element retrieveMetadata(ServiceContext context, String id, ElementSetName setName, String + outSchema, Set elemNames, String typeName, ResultType resultType, String strategy, String displayLanguage, + boolean checkMetadataAvailableInPortal) throws CatalogException { + + if (checkMetadataAvailableInPortal) { + // Check if the metadata is available in the portal + String elasticSearchQuery = "{ \"bool\": {\n" + + " \"must\": [\n" + + " {" + + " \"term\": {" + + " \"id\": {" + + " \"value\": \"%s\"" + + " }" + + " }" + + " } " + + " ]\n" + + " ,\"filter\":{\"query_string\":{\"query\":\"%s\"}}}}"; + + JsonNode esJsonQuery; + + try { + String filterQueryString = esFilterBuilder.build(context, "metadata", false, node); + String jsonQuery = String.format(elasticSearchQuery, id, filterQueryString); + + ObjectMapper objectMapper = new ObjectMapper(); + esJsonQuery = objectMapper.readTree(jsonQuery); + + TotalHits total = searchManager.query(esJsonQuery, new HashSet<>(), 0, 0).hits().total(); + + if (Optional.ofNullable(total).map(TotalHits::value).orElse(0L) == 0) { + return null; + } + } catch (Exception e) { + throw new RuntimeException(e); + } + + } try { //--- get metadata from DB @@ -150,12 +181,12 @@ public static Element retrieveMetadata(ServiceContext context, String id, Elemen res = applyElementNames(context, elemNames, typeName, scm, schema, res, resultType, info, strategy); - if(Log.isDebugEnabled(Geonet.CSW_SEARCH)) + if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) Log.debug(Geonet.CSW_SEARCH, "SearchController:retrieveMetadata: before applying postprocessing on metadata Element for id " + id); res = applyPostProcessing(context, scm, schema, res, outSchema, setName, resultType, id, displayLanguage); - if(Log.isDebugEnabled(Geonet.CSW_SEARCH)) + if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) Log.debug(Geonet.CSW_SEARCH, "SearchController:retrieveMetadata: All processing is complete on metadata Element for id " + id); if (res != null) { @@ -177,39 +208,39 @@ public static Element retrieveMetadata(ServiceContext context, String id, Elemen /** * Applies requested ElementNames and typeNames. - * + *

    * For ElementNames, several strategies are implemented. Clients can determine the behaviour by * sending attribute "elementname_strategy" with one of the following values: - * + *

    * csw202 relaxed context geonetwork26 - * + *

    * The default is 'relaxed'. The strategies cause the following behaviour: - * + *

    * csw202 -- compliant to the CSW2.0.2 specification. In particular this means that complete * metadata are returned that match the requested ElementNames, only if they are valid for their * XSD. This is because GeoNetwork only supports OutputFormat=application/xml, which mandates * that valid documents are returned. Because possibly not many of the catalog's metadata are * valid, this is not the default. - * + *

    * relaxed -- like csw202, but dropped the requirement to only include valid metadata. So this * returns complete metadata that match the requested ElementNames. This is the default * strategy. - * + *

    * context -- does not return complete metadata but only the elements matching the request, in * their context (i.e. all ancestor elements up to the root of the document are retained). This * strategy is similar to geonetwork26 but the context allows clients to determine which of the * elements returned corresponds to which of the elements requested (in case they have the same * name). - * + *

    * geonetwork26 -- behaviour as in GeoNetwork 2.6. Just return the requested elements, stripped * of any context. This can make it impossible for the client to determine which of the elements * returned corresponds to which of the elements requested; for example if the client asks for * gmd:title, the response may contain various gmd:title elements taken from different locations * in the metadata document. - * + *

    * ------------------------------------------------- Relevant sections of specification about * typeNames: - * + *

    * OGC 07-006 10.8.4.8: The typeNames parameter is a list of one or more names of queryable * entities in the catalogue's information model that may be constrained in the predicate of the * query. In the case of XML realization of the OGC core metadata properties (Subclause 10.2.5), @@ -220,16 +251,16 @@ public static Element retrieveMetadata(ServiceContext context, String id, Elemen * addition, all or some of the these queryable entity names may be specified in the query to * define which metadata record elements the query should present in the response to the * GetRecords operation. - * + *

    * OGC 07-045: - * + *

    * 8.2.2.1.1 Request (GetRecords) TypeNames. Must support *one* of “csw:Record” or * “gmd:MD_Metadata” in a query. Default value is “csw:Record”. - * + *

    * So, in OGC 07-045, exactly one of csw:Record or gmd:MD_Metadata is mandated for typeName. - * + *

    * ---------------------------------- Relevant specs about ElementNames: - * + *

    * OGC 07-006 10.8.4.9: The ElementName parameter is used to specify one or more metadata record * elements, from the output schema specified using the outputSchema parameter, that the query * shall present in the response to the a GetRecords operation. Since clause 10.2.5 realizes the @@ -237,19 +268,19 @@ public static Element retrieveMetadata(ServiceContext context, String id, Elemen * XPath expression perhaps using qualified names. In the general case, a complete XPath * expression may be required to correctly reference an element in the information model of the * catalog. - * + *

    * However, in the case where the typeNames attribute on the Query element contains a single * value, the catalogue can infer the first step in the path expression and it can be omitted. * This is usually the case when querying the core metadata properties since the only queryable * target is csw:Record. - * + *

    * If the metadata record element names are not from the schema specified using the outputSchema * parameter, then the service shall raise an exception as described in Subclause 10.3.7. - * + *

    * OGC 07-045: Usage of the ELEMENTNAME is not further specified here. - * + *

    * ---------------------------------- Relevant specs about outputFormat: - * + *

    * OGC 07-006 10.8.4.4 outputFormat parameter: In the case where the output format is * application/xml, the CSW shall generate an XML document that validates against a schema * document that is specified in the output document via the xsi:schemaLocation attribute @@ -289,7 +320,7 @@ private static Element applyElementNames(ServiceContext context, Set ele } boolean metadataContainsAllRequestedElementNames = true; - List nodes = new ArrayList(); + List nodes = new ArrayList<>(); for (String elementName : elementNames) { if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) Log.debug(Geonet.CSW_SEARCH, "SearchController dealing with elementName: " + elementName); @@ -342,7 +373,7 @@ private static Element applyElementNames(ServiceContext context, Set ele Log.debug(Geonet.CSW_SEARCH, "strategy is context, constructing context to root"); } - List elementsInContextMatching = new ArrayList(); + List elementsInContextMatching = new ArrayList<>(); for (Element match : elementsMatching) { Element parent = match.getParentElement(); while (parent != null) { @@ -370,7 +401,7 @@ private static Element applyElementNames(ServiceContext context, Set ele } } - if (metadataContainsAllRequestedElementNames == true) { + if (metadataContainsAllRequestedElementNames) { if (Log.isDebugEnabled(Geonet.CSW_SEARCH)) Log.debug(Geonet.CSW_SEARCH, "metadata containa all requested elementnames: included in response"); @@ -414,27 +445,27 @@ private static Element applyElementNames(ServiceContext context, Set ele /** * TODO improve description of method. Performs the general search tasks. * - * @param context Service context - * @param startPos start position (if paged) - * @param maxRecords max records to return - * @param resultType requested ResultType - * @param outSchema requested OutputSchema - * @param setName requested ElementSetName - * @param filterExpr requested FilterExpression - * @param filterVersion requested Filter version - * @param sort requested sorting - * @param elemNames requested ElementNames - * @param typeName requested typeName - * @param maxHitsFromSummary ? - * @param strategy ElementNames strategy + * @param context Service context + * @param startPos start position (if paged) + * @param maxRecords max records to return + * @param resultType requested ResultType + * @param outSchema requested OutputSchema + * @param setName requested ElementSetName + * @param filterExpr requested FilterExpression + * @param filterVersion requested Filter version + * @param sort requested sorting + * @param elemNames requested ElementNames + * @param typeName requested typeName + * @param maxHitsFromSummary ? 
+ * @param strategy ElementNames strategy * @return result * @throws CatalogException hmm */ - public Element search(ServiceContext context, int startPos, int maxRecords, - ResultType resultType, String outSchema, ElementSetName setName, - Element filterExpr, String filterVersion, List sort, - Set elemNames, String typeName, int maxHitsFromSummary, - String strategy) throws CatalogException { + public Element search(ServiceContext context, int startPos, int maxRecords, + ResultType resultType, String outSchema, ElementSetName setName, + Element filterExpr, String filterVersion, List sort, + Set elemNames, String typeName, int maxHitsFromSummary, + String strategy) throws CatalogException { String elasticSearchQuery = convertCswFilterToEsQuery(filterExpr, filterVersion); @@ -455,31 +486,33 @@ public Element search(ServiceContext context, int startPos, int maxRecords, // TODO: Check to get summary or remove custom summary output try { - SearchResponse result = searchManager.query(esJsonQuery, new HashSet<>(), startPos-1, maxRecords, sort); + SearchResponse result = searchManager.query(esJsonQuery, new HashSet<>(), startPos - 1, maxRecords, sort); List hits = result.hits().hits(); - long numMatches = result.hits().hits().size(); + TotalHits total = result.hits().total(); + long numMatches = total != null ? total.value() : 0; if (numMatches != 0 && startPos > numMatches) { throw new InvalidParameterValueEx("startPosition", String.format( "Start position (%d) can't be greater than number of matching records (%d for current search).", startPos, numMatches - )); + )); } int counter = 0; ObjectMapper objectMapper = new ObjectMapper(); - for(Hit hit : hits) { - int mdId = Integer.parseInt((String) objectMapper.convertValue(hit.source(), Map.class).get("id")); + for (Hit hit : hits) { + int mdId = Integer.parseInt((String) objectMapper.convertValue(hit.source(), Map.class).get("id")); AbstractMetadata metadata = metadataUtils.findOne(mdId); String displayLanguage = context.getLanguage(); + // The query to retrieve GetRecords, filters by portal. No need to re-check again when retrieving each metadata. 
Element resultMD = retrieveMetadata(context, metadata.getId() + "", - setName, outSchema, elemNames, typeName, resultType, strategy, displayLanguage); + setName, outSchema, elemNames, typeName, resultType, strategy, displayLanguage, false); if (resultMD != null) { if (resultType == ResultType.RESULTS) { @@ -496,7 +529,7 @@ public Element search(ServiceContext context, int startPos, int maxRecords, results.setAttribute("elementSet", setName.toString()); - if (numMatches > counter) { + if (numMatches > counter + (startPos -1)) { results.setAttribute("nextRecord", Long.toString(counter + startPos)); } else { results.setAttribute("nextRecord", "0"); @@ -526,8 +559,8 @@ public Element search(ServiceContext context, int startPos, int maxRecords, * @throws InvalidParameterValueEx hmm */ public Element applyElementSetName(ServiceContext context, SchemaManager schemaManager, String schema, - Element result, String outputSchema, ElementSetName elementSetName, - ResultType resultType, String id, String displayLanguage) throws InvalidParameterValueEx { + Element result, String outputSchema, ElementSetName elementSetName, + ResultType resultType, String id, String displayLanguage) throws InvalidParameterValueEx { Path schemaDir = schemaManager.getSchemaCSWPresentDir(schema); Path styleSheet = schemaDir.resolve(outputSchema + "-" + elementSetName + ".xsl"); @@ -552,33 +585,33 @@ public Element applyElementSetName(ServiceContext context, SchemaManager schemaM } } - private String convertCswFilterToEsQuery(Element xml, String filterVersion) { + private String convertCswFilterToEsQuery(Element xml, String filterVersion) { return CswFilter2Es.translate(FilterParser.parseFilter(xml, filterVersion), fieldMapper); } /** * Applies postprocessing stylesheet if available. - * + *

    * Postprocessing files should be in the present/csw folder of the schema and have this naming: - * + *

    * For default CSW service - * + *

    * 1) gmd-csw-postprocessing.xsl : Postprocessing xsl applied for CSW service when requesting iso (gmd) output * 2) csw-csw-postprocessing.xsl : Postprocessing xsl applied for CSW service when requesting ogc (csw) output - * + *

    * For a custom sub-portal named inspire - * + *

    * 1) gmd-inspire-postprocessing.xsl : Postprocessing xsl applied for custom inspire sub-portal when requesting iso output * 2) csw-inspire-postprocessing.xsl : Postprocessing xsl applied for custom inspire sub-portal when requesting ogc (csw) output * - * @param context Service context - * @param schemaManager schemamanager - * @param schema schema - * @param result result - * @param outputSchema requested OutputSchema - * @param elementSetName requested ElementSetName - * @param resultType requested ResultTYpe - * @param id metadata id + * @param context Service context + * @param schemaManager schemamanager + * @param schema schema + * @param result result + * @param outputSchema requested OutputSchema + * @param elementSetName requested ElementSetName + * @param resultType requested ResultTYpe + * @param id metadata id * @param displayLanguage language to use in response * @return metadata * @throws InvalidParameterValueEx hmm @@ -586,7 +619,7 @@ private String convertCswFilterToEsQuery(Element xml, String filterVersion) { private static Element applyPostProcessing(ServiceContext context, SchemaManager schemaManager, String schema, Element result, String outputSchema, ElementSetName elementSetName, ResultType resultType, String id, String displayLanguage) throws InvalidParameterValueEx { - Path schemaDir = schemaManager.getSchemaCSWPresentDir(schema); + Path schemaDir = schemaManager.getSchemaCSWPresentDir(schema); final NodeInfo nodeInfo = ApplicationContextHolder.get().getBean(NodeInfo.class); @@ -595,7 +628,7 @@ private static Element applyPostProcessing(ServiceContext context, SchemaManager + "-postprocessing.xsl"); if (Files.exists(styleSheet)) { - Map params = new HashMap(); + Map params = new HashMap<>(); params.put("lang", displayLanguage); try { diff --git a/datastorages/cmis/pom.xml b/datastorages/cmis/pom.xml index 9ee1af76782..a595be9f683 100644 --- a/datastorages/cmis/pom.xml +++ b/datastorages/cmis/pom.xml @@ -28,7 +28,7 @@ gn-datastorages org.geonetwork-opensource.datastorage - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/datastorages/jcloud/pom.xml b/datastorages/jcloud/pom.xml index 01b948abb98..9c8ee48b0c0 100644 --- a/datastorages/jcloud/pom.xml +++ b/datastorages/jcloud/pom.xml @@ -28,7 +28,7 @@ gn-datastorages org.geonetwork-opensource.datastorage - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/datastorages/pom.xml b/datastorages/pom.xml index 3a290e17019..a7af09b2cfc 100644 --- a/datastorages/pom.xml +++ b/datastorages/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/datastorages/s3/pom.xml b/datastorages/s3/pom.xml index 26a0fcd7685..986223fb45a 100644 --- a/datastorages/s3/pom.xml +++ b/datastorages/s3/pom.xml @@ -28,7 +28,7 @@ gn-datastorages org.geonetwork-opensource.datastorage - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 00000000000..20f2716957f --- /dev/null +++ b/docker/README.md @@ -0,0 +1,3 @@ +# Docker Test Environments + +These docker images are intended for development and debugging. For production we recommend the official GeoNetwork docker images at https://github.com/geonetwork/docker-geonetwork.git repository. diff --git a/docker/docker-geonetwork.txt b/docker/docker-geonetwork.txt deleted file mode 100644 index 1e1ae93bf58..00000000000 --- a/docker/docker-geonetwork.txt +++ /dev/null @@ -1 +0,0 @@ -Please see https://github.com/geonetwork/docker-geonetwork.git. 
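As a quick orientation to these development compositions, here is a minimal sketch of bringing up the ``gn-postgres`` environment introduced below; the Maven command is an assumption based on the standard GeoNetwork build, and each folder's README remains the authoritative reference.

```bash
# Build the GeoNetwork webapp first (assumed standard build; see the module READMEs)
mvn clean install -DskipTests

# Start the PostgreSQL test composition from its folder
cd docker/gn-postgres
docker-compose up -d

# The compose file publishes GeoNetwork on port 8080:
# http://localhost:8080/geonetwork
docker-compose logs -f geonetwork
```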
diff --git a/docker/gn-cas-ldap/README.md b/docker/gn-cas-ldap/README.md index 326641c3a64..ae16f5181ed 100644 --- a/docker/gn-cas-ldap/README.md +++ b/docker/gn-cas-ldap/README.md @@ -1,4 +1,4 @@ -# Introduction +# GeoNetwork CAS Test Environment This composition is meant to make runtime testing the CAS integration of GeoNetwork easier. @@ -6,6 +6,8 @@ GeoNetwork easier. This composition also integrates a LDAP, so that testing the config-spring-cas-ldap configuration is also possible. +These docker images are intended for development and debugging. For production we recommend the official GeoNetwork docker images at https://github.com/geonetwork/docker-geonetwork.git repository. + # Prerequisites It requires the GeoNetwork webapp to be built first: @@ -18,7 +20,6 @@ Then it can be launched: ``` $ docker-compose up - ``` # Accessing the CAS login page from GeoNetwork diff --git a/docker/gn-postgres/README.md b/docker/gn-postgres/README.md new file mode 100644 index 00000000000..fb5aa0e58d2 --- /dev/null +++ b/docker/gn-postgres/README.md @@ -0,0 +1,8 @@ +# GeoNetwork PostgreSQL Test Environment + +This composition is meant to make runtime testing the PostgreSQL integration of +GeoNetwork easier. + +This folder provides a ``docker-compose.yml`` file for local testing. + +These docker images are intended for development and debugging. For production we recommend the official GeoNetwork docker images at https://github.com/geonetwork/docker-geonetwork.git repository. diff --git a/docker/docker-compose.yml b/docker/gn-postgres/docker-compose.yml similarity index 97% rename from docker/docker-compose.yml rename to docker/gn-postgres/docker-compose.yml index c6d942427d9..3c40e613034 100644 --- a/docker/docker-compose.yml +++ b/docker/gn-postgres/docker-compose.yml @@ -4,7 +4,7 @@ volumes: services: geonetwork: - image: geonetwork:3.99.0 + image: geonetwork:latest restart: always ports: - 8080:8080 diff --git a/docker/gn-postgres/kibana/kibana.txt b/docker/gn-postgres/kibana/kibana.txt new file mode 100644 index 00000000000..942837bbb76 --- /dev/null +++ b/docker/gn-postgres/kibana/kibana.txt @@ -0,0 +1,5 @@ +server.basePath: "/geonetwork/dashboards" +server.rewriteBasePath: false +kibana.index: ".dashboards" +elasticsearch.hosts: ["http://elasticsearch:9200"] + diff --git a/docs/changes/changes3.12.12-0.txt b/docs/changes/changes3.12.12-0.txt new file mode 100644 index 00000000000..43a4fce6aa0 --- /dev/null +++ b/docs/changes/changes3.12.12-0.txt @@ -0,0 +1,64 @@ +================================================================================ +=== +=== GeoNetwork 3.12.12: List of changes +=== +================================================================================ +- Documentation / GeoNetwork 3.12 doing a release fixes (#7852) +- [Backport 3.12.x] GeoNetwork harvester / Check if a resource exists to save it, instead of trying to retrieve the file details, to avoid confusing NoSuchFileException exception (#7846) +- Harvesters / Reset harvester history pagination when selecting a harvester (#7836) +- Follow up of #7279 to unify the button links in the metadata detail page as done for the analog change in main branch (#7391) +- Remove invalid empty migration added accidentally in https://github.com/geonetwork/core-geonetwork/commit/93377dd1866a5ee3f5b0098bcd1dd6188c009771 (#7821) +- Doc / Editor configuration improvements (#7826) +- [Backport 3.12.x] Bump actions/setup-java from 4.0.0 to 4.1.0 (#7816) +- [Backport 3.12.x] Fix alignment of user enabled checkbox (#7772) +- Remove 
handlebars.js v2.0.0 (#7762) +- update 3.12.x branch to recent sphinx-build and crank up warnings +- Addressing docs glitch #7666 creating-group and authentication-mode +- Addressing docs glitch #7666 tutorials/deployment/index +- addressing docs-glitch in install-guide/configuring-database +- addressing docs-glitch in search-ui/enrichview and search-ui/loadview +- addressing docs-glitch in install-guide/map-print-setup +- addressing docs-glitch in publishing/managing-privileges +- corrected minor typo in install-guide/map-print-setup +- Fix conversion errors after switching to MkDocs +- manual review of mkdocs glitches +- [Backport 3.12.x] Create a metadata / Add dynamic and download privileges to the users in the same group (#7748) +- Metadata detail page - don't display the resources description duplicated (#6798) +- Use the generated metadata UUID for resource links when importing metadata with the option 'Generate UUID' (#7734) +- Remove unused jslint-maven-plugin +- [Backport 3.12.x] Bump org.json:json from 20140107 to 20240205 (#7723) +- [Backport 3.12.x] Github Actions / Bump stCarolas/setup-maven from 4 to 5 (#7719) +- [Backport 3.12.x] Bump commons-fileupload from 1.3.3 to 1.5 (#7698) +- Fix pdf link issue +- Fix mimetypes on attachments as some were incorrect. (#7676) +- Docs / Update copyright year +- Fix the grid on the homepage of the documentation (#7559) +- troubleshoot release module order of execution and profile acivation and handling of jetty folder +- Remember to include -Drelease flag so that all modules (even optional modules) are compiled and tested during QA +- [BP] Bump actions/setup-java from 3.12.0 to 4.0.0 (#7522) +- [BP] Service context null pointer (#7593) +- [BP] Overview not shown in PDF export when the overview image is stored in GeoNetwork and requires authentication to access it. Fixes #7540 (#7556) +- [BP] Update iso19139 csw-full.xsl (#7558) +- [BP] Add ownerId to geonet:info (#7547) +- [BP] Don't display header menu and footer in single metadata PDF export (#7532) +- [BP] Bump actions/setup-python from 4 to 5 (#7543) +- [BP] Remote INSPIRE Atom Feeds harvester - Remove duplicates by dataset identifier (#7491) +- [BP] When getting locale message, default locale to LocaleContextHolder when locale is null (#7516) +- [BP] Fix some cases that were not considering both message and description when displaying errors. (#7517) +- [BP] Docs / Fix the mike version to 2.0.0 and change the parameter --no-redirect to --alias-type=copy (changed in mike 2.0.0) (#7507) +- [BP] Check http links in documentation (#7496) +- [BP] Update manual links to use https://docs.geonetwork-opensource.org/ (#7487) +- [BP] Change the structure of the MkDocs assets. Stylesheets and logos are moved to the `overrides` directory (#7429) +- Fix publish link (#7479) +- Fix url link in full view. bracket ") " could be included in link (#7483) +- [BP] Batch edit access level for editor role (#7464) +- [BP] Add node identifier parameter for xlst processing. 
Required in skin.xsl (#7454) +- Fix other exceptions in tests related to (#6977) +- [BP] Remove exception class name from the error message (#6977) +- [BP] Fix cookies path when deployed on root "/" context (#7446) +- [BP] Fix exception handling from schematron validation so that it flags the metadata as invalid if there is an exception (#6978) +- [BP] Remove old password field for admins (#7417) +- [backport 3.12.x] Add documentation to GitHub workflows (#7414) +- [BP] Change the url the icon in the homepage is linking to (#7422) +- Remove changelog for latest and stable +- Correct canonical_version to stable diff --git a/docs/changes/changes4.2.9-0.txt b/docs/changes/changes4.2.9-0.txt new file mode 100644 index 00000000000..28b376d7ba9 --- /dev/null +++ b/docs/changes/changes4.2.9-0.txt @@ -0,0 +1,70 @@ +================================================================================ +=== +=== GeoNetwork 4.2.9: List of changes +=== +================================================================================ +- Fix startup error. Follow up #7456 (#7859) +- Documentation / GeoNetwork 4.2 doing a release fixes (#7647) +- [Backport 4.2.x] Extend proxy to manage duplicated parameters (#7854) +- [Backport 4.2.x] Configuration to restrict the hosts and ports accessible by the http proxy servlet (#7326) +- [Backport 4.2.x] GeoNetwork harvester / Check if a resource exists to save it, instead of trying to retrieve the file details, to avoid confusing NoSuchFileException exception (#7845) +- Standards / Formatter / Citation / Pick latest date (#7835) +- [Backport 4.2.x] INSPIRE / Add testsuite for IACS (#7834) +- [Backport 4.2.x] Harvester / Localfilesystem / Log properly to harvester log file. (#7833) +- [Backport 4.2.x] Record view / ISO19139 / ISO19115-3.2008 display the unit part in @uom attribute, not the full url (#7832) +- Harvesters / Reset harvester history pagination when selecting a harvester (#7831) +- Trigger metadata unpublish event when removing the privileges to the ALL group in the privileges dialog (#7828) +- Doc / Editor configuration improvements (#7827) +- Update lodash to version 4.17.21 (#7825) +- [Backport 4.2.x] Bump actions/setup-java from 4.0.0 to 4.1.0 (#7814) +- Record view / Don't add the associated resources in the metadata static page, this page doesn't include JS libs (#7797) +- [Backport 4.2.x] Decouple metadata user feedback from metadata rating feature (#7796) +- [Backport 4.2.x] Fix wrong manual links (#7793) +- [Backport 4.2.x] Additional ISO19139 German translations (#7788) +- [Backport 4.2.x] Replace the 'unlock' icon with the 'lock open' icon (#7787) +- [Backport 4.2.x] Removed @RequestHeader for "Accept" headers as it is not supported by openAPI specification (#7785) +- Fix missing MetadataStatusResponse and MetadataWorkflowStatusResponse in open api spec (#7783) +- Fix SpringDoc duplicate Schema name (#7781) +- [Backport 4.2.x] Fix duplicate GET operation on /{portal}/api/sources and missing /subportal endpoint (#7780) +- Spit getRecordAs @RequestMapping into getRecordAsJson and getRecordAsXML in order to fix duplicate operation Id - By having multiple @RequestMapping was causing to create operation id as getRecordAs and getRecordAs_1 +- [Backport 4.2.x] Fix alignment of user enabled checkbox (#7773) +- [Backport 4.2.x] Fix ISO19139 German labels (#7763) +- Remove handlebars.js v2.0.0 +- Reports / Fix extract user groups for non-admin users (#7746) +- Addressing docs glitch #7666 creating-group and authentication-mode +- Addressing docs glitch #7666 
in installing-from-war-file, version-4.0.2 and tutorials/deployment/index +- addressing docs-glitch in install-guide/configuring-database +- addressing docs-glitch in search-ui/enrichview and search-ui/loadview +- addressing docs-glitch in install-guide/map-print-setup +- addressing docs-glitch in publishing/managing-privileges +- corrected minor typo in install-guide/map-print-setup +- Fix conversion errors after switching to MkDocs +- manual review of mkdocs glitches +- [Backport 4.2.x] Create a metadata / Add dynamic and download privileges to the users in the same group (#7744) +- Index / Add danish language. (#7736) +- [Backport 4.2.x] Documentation / Elasticsearch query endpoint - query samples (#7732) +- [Backport 4.2.x] Separate docs for _search and _msearch (#7731) +- Map viewer / Remove Stamen background layers - no longer available (#7730) +- Use the generated metadata UUID for resource links when importing metadata with the option 'Generate UUID' (#7729) +- Remove unused jslint-maven-plugin (#7727) +- [Backport 4.2.x] Bump org.json:json from 20140107 to 20240205 (#7724) +- [Backport 4.2.x] Github Actions / Bump stCarolas/setup-maven from 4 to 5 (#7720) +- Enable preemptive for csw requests with credentials (#5497) (#7716) +- [Backport 4.2.x] Add a role and feature matrix to the GeoNetwork documentation (#7709) +- Addressing docs translation glitch #7687- fixes creating-custom-editor +- Addressing docs translation glitch #7687- fixes adding-static-pages and configuring-search-fields +- [Backport 4.2.x] Bump commons-fileupload from 1.3.3 to 1.5 (#7699) +- [Backport 4.2.x] Remove empty class SourcesLib and deprecated/unused methods in ResourceLib / Sonarlint improvements (#7694) +- Update Springdoc so that it supports Map objects in the request parameters. By default injectable parameters are excluded from request parameters. Map is one of those object however it does occur where map objects are supplied as parameters and they should be added to open api spec. There are currently no cases where a map is injected on purpose into the request parameters. This will fix issues with missing request parameters documentation which are based on Map objects. +- Fix spring doc for attachment and keyword to better identify files resources being returned. Update attachment api "Get a metadata resource" should indicate that gets a file resource Also "Create a new resource" should identify that it consumes any resources Update keywords api "Download a thesaurus by name" should indicate that gets a file resource +- Fix springdoc so that enums names are used instead of toString This fixes bug where some apis will not execute correctly from the swagger pager due to the wrong enum value being supplied. i.e. visibility should be using enum values PUBLIC/PRIVATE instead of public/private in formatters/zip api, the format should be SIMPLE/PARTIAL/FULL instead of simple/partial/full +- Editor / Fix add element attribute (#7685) +- [Backport 4.2.x] Metadata editor / Fix javascript error in the add thumbnail option when the metadata has 1 WMS layer (#7684) +- [BP] Search results / Configure related records type depending on template. (#7376) +- Metadata editor / Fix javascript error when editing a metadata, due to undefined property in gnLinkToMetadata directive (#7682) +- Fix pdf link issue (#7681) +- Fix mimetypes on attachments as some were incorrect. 
(#7675) +- accidental localhost link in docs +- Docs / Update copyright year +- Bump github/codeql-action from 2 to 3 (#7662) +- Bump advanced-security/maven-dependency-submission-action from 3 to 4 (#7661) diff --git a/docs/changes/changes4.4.3-0.txt b/docs/changes/changes4.4.3-0.txt new file mode 100644 index 00000000000..ce5661888d1 --- /dev/null +++ b/docs/changes/changes4.4.3-0.txt @@ -0,0 +1,117 @@ +================================================================================ +=== +=== GeoNetwork 4.4.3: List of changes +=== +================================================================================ +- Update linux workflow to maven 3.8.3 +- require maven 3.8.3 minimum for MNG-7214 fix +- Move version 3.12.x changelog to archive +- Release notes for GeoNetwork 4.4.3. +- Release notes for GeoNetwork 4.29 / 3.12.12 versions +- add necessary welsh language files for translating the application (#7851) +- Standard / ISO19115-3 / Batch edit may trigger error on creation date (#7712) +- Fix startup error. Follow up #7456 (#7858) +- i18n / Transifex update. (#7855) +- ISO19139 / Index online resources application profile element encoded as anchor (#7798) +- Extend proxy to manage duplicated parameters (#7456) +- Documentation / GeoNetwork 4.4 doing a release fixes (#7648) +- Indexing / ISO / Properly index all keywords even if in different thesaurus block +- Editor / Distribution / Properly refresh list link on last one (#7844) +- GeoNetwork harvester / Check if a resource exists to save it, instead of trying to retrieve the file details, to avoid confusing NoSuchFileException exception (#7577) +- Update README.md +- Update README.md +- Bump actions/upload-artifact from 3.1.0 to 4.3.1 +- Bump ossf/scorecard-action from 2.1.2 to 2.3.1 +- Harvesting / WFS Features / Do not skip attributes even if geom is invalid. +- Editor / Associated resource / Add button icon configuration. +- Editor / Distribution improvements - Update configuration for ISO19139 distributions as protocols are not categorized as ISO19115-3 protocols. (#7838) +- Standards / Formatter / Citation / Pick latest date +- INSPIRE / Add testsuite for IACS (#7756) +- Harvester / Localfilesystem / Log properly to harvester log file. (#7660) +- Harvester / WFS / No need to manually managed commit interval (#7737) +- Record view / ISO19139 / ISO19115-3.2008 display the unit part in @uom attribute, not the full url (#7791) +- Harvesters / Reset harvester history pagination when selecting a harvester +- Thesaurus / Improve support of EU publication office SKOS format (#7673) +- Create scorecard.yml +- Doc / Editor configuration improvements (#7776) +- Update lodash to version 4.17.21 +- Improve Elasticsearch manual installation to disable security for development +- Docker / Update docker compose in es module to Elasticsearch 8 (#7817) +- Trigger metadata unpublish event when removing the privileges to the ALL group in the privileges dialog +- Bump actions/setup-java from 4.0.0 to 4.1.0 (#7808) +- Translated the index warnings / errors. 
(#7531) +- minor typo fixes +- System setting for documentation url (#7782) +- Record view / Don't add the associated resources in the metadata static page, this page doesn't include JS libs +- Decouple metadata user feedback from metadata rating feature (#7770) +- Thesaurus / Add support for codelist described using SDMX +- Fix wrong manual links +- Standard / ISO19115-3 / Quality report / Index descriptive results +- Additional ISO19139 German translations (#7778) +- Update the `set privileges` popup with the new icon +- Replace the 'unlock' icon with the 'lock open' icon which is clearer. This new icon has more visual differences with the lock icon and it's therefore easier for a user to see the difference. +- Removed @RequestHeader for "Accept" headers as it is not supported by openAPI specification (#7572) +- Fix missing MetadataStatusResponse and MetadataWorkflowStatusResponse in open api spec (#7627) +- Fix SpringDoc duplicate Schema name Without this fix springdoc would randomly pick between 2 conflicting schemas and this could produce incorrect results in the open api spec. +- Merge getSubPortals into getSources so that there is only one GET api for the operation. This fixes the bug with 2 GET operation on /{portal}/api/sources +- Update services/src/main/java/org/fao/geonet/api/sources/SourcesApi.java +- Fix duplicate GET operation on /{portal}/api/sources Add missing /subportal endpoint. +- Spit getRecordAs @RequestMapping into getRecordAsJson and getRecordAsXML in order to fix duplicate operation Id - By having multiple @RequestMapping was causing to create operation id as getRecordAs and getRecordAs_1 +- Fix alignment of user enabled checkbox (#7764) +- Fix ISO19139 German labels (#7761) +- Remove handlebars.js v2.0.0 +- Fix query field name in OverviewIndexFieldUpdater to update the metadata overview in the index +- API / Category / Fix update fields +- Addressing docs glitch #7666 creating-group and authentication-mode +- Addressing docs glitch #7666 in installing-from-war-file, version-4.0.2 and tutorials/deployment/index +- addressing docs-glitch in install-guide/configuring-database +- addressing docs-glitch in search-ui/enrichview and search-ui/loadview +- addressing docs-glitch in install-guide/map-print-setup +- addressing docs-glitch in publishing/managing-privileges +- corrected minor typo in install-guide/map-print-setup +- Fix conversion errors after switching to MkDocs +- manual review of mkdocs glitches +- Reports / Fix extract user groups for non-admin users (#7742) +- Create a metadata / Add dynamic and download privileges to the users in the same group (#7679) +- Thesaurus / Add support for thesaurus described using OWL format +- Editor / Add view name class to facilitate custom styling +- Update SECURITY.md +- Index / Add danish language. 
(#7697) +- Standard / ISO19115-3 / Improve french translation for temporal extent (#7700) +- Standard / ISO19115-3 / Editor configuration / Improve date field configuration (#7702) +- Documentation / Elasticsearch query endpoint - query samples (#7722) +- remove mention of q query parameter +- Separate docs for _search and _msearch +- Map viewer / Remove Stamen background layers - no longer available +- Use the generated metadata UUID for resource links when importing metadata with the option 'Generate UUID' +- Elasticssearch 8 upgrade (#7599) +- Vertical extent label modification (#7604) +- Remove unused jslint-maven-plugin (#7725) +- Bump org.json:json from 20140107 to 20240205 (#7701) +- using gn- icon definitions for all types now +- Github Actions / Bump stCarolas/setup-maven from 4 to 5 (#7718) +- Enable preemptive for csw requests with credentials (#5497) (#7706) +- Add a role and feature matrix to the GeoNetwork documentation (#7686) +- Addressing docs translation glitch #7687- fixes adding-static-pages and configuring-search-fields (#7696) +- Automatic formatting +- Bump commons-fileupload from 1.3.3 to 1.5 (#6851) +- Remove empty class SourcesLib and deprecated/unused methods in ResourceLib / Sonarlint improvements (#7692) +- Fix spring doc for attachment and keyword to better identify files resources being returned. Update attachment api "Get a metadata resource" should indicate that gets a file resource Also "Create a new resource" should identify that it consumes any resources Update keywords api "Download a thesaurus by name" should indicate that gets a file resource +- Fix springdoc so that enums names are used instead of toString This fixes bug where some apis will not execute correctly from the swagger pager due to the wrong enum value being supplied. i.e. visibility should be using enum values PUBLIC/PRIVATE instead of public/private in formatters/zip api, the format should be SIMPLE/PARTIAL/FULL instead of simple/partial/full +- Update Springdoc so that it supports Map objects in the request parameters. By default injectable parameters are excluded from request parameters. Map is one of those object however it does occur where map objects are supplied as parameters and they should be added to open api spec. There are currently no cases where a map is injected on purpose into the request parameters. This will fix issues with missing request parameters documentation which are based on Map objects. +- Editor / Fix add element attribute (#7683) +- Metadata editor / Fix javascript error in the add thumbnail option when the metadata has 1 WMS layer (#7646) +- Add configuration to filter out Elasticsearch fields when download or dynamic privileges are not set +- Fix javascript error accessing the metadata detail page in gnMetadataSocialLink directive +- Metadata editor / Fix javascript error when editing a metadata, due to undefined property in gnLinkToMetadata directive +- Fix pdf link issue (#7667) +- Editor / Distribution improvements (#7468) +- Fix mimetypes on attachments as some were incorrect. (#7671) +- accidental localhost link in docs +- Docs / Update copyright year +- Bump github/codeql-action from 2 to 3 (#7552) +- Bump advanced-security/maven-dependency-submission-action from 3 to 4 (#7655) +- Formatter / Withheld element not always hidden. 
+- Change log for version 4.4.2 (#7654) +- Change log for version 4.2.8 +- Update version to 4.4.3-SNAPSHOT \ No newline at end of file diff --git a/docs/changes/changes4.4.4-0.txt b/docs/changes/changes4.4.4-0.txt new file mode 100644 index 00000000000..5e536738e30 --- /dev/null +++ b/docs/changes/changes4.4.4-0.txt @@ -0,0 +1,45 @@ +================================================================================ +=== +=== GeoNetwork 4.4.4-SNAPSHOT: List of changes +=== +================================================================================ +- documentation navigation update for 4.4.4 release +- Setup for release of 4.4.4 with gitignore for build scripts +- Transfix update for 4.4.4 release +- Fix sql syntax for migrate to 4.4.4 script (#7956) +- Enforce development environment instructions to Elasticsearch 8.11.3 (#7866) +- Adjust nav tree for batchupdate options +- Guidance on use of GeoNetwork 2.0 harvester +- Update revise use of empty or mostly empty pages +- Remove q service, used to query in previous versions that used Lucene as the search engine +- Modify csv search export to escape double-quotes with double-quotes instead of backslash (#7927) +- Fix harvester URIMapper to handle local metadata (#7946) +- CSW server / Use portal filter in GetRecordById requests (#7890) +- CSW / GetRecords / Number of matches is not total match +- metadata history viewing with user profile level (#7450) +- Map / WPS / Add support for predefined WPS list (#7842) +- Hide the typeahead search suggestions when hitting the ENTER key in the search field without selecting a suggested value +- Standard / ISO19115-3 / Editor / Uom encoding support (#7915) +- Search / May fail with random_score depending on ES config (#7912) +- API / Import does not overwrite metadata if any validation status (#7703) +- Standard / ISO19115-3 / Use anchor encoding for IACS keywords (#7853) +- Fix issue with saving metadata status causes indexing of metadata which causes issues for db rollbacks (#7514) +- move email notification at the end of status change transaction (#7864) +- Upgrade springdoc to 1.7.0 (#7596) +- Configure harvesters log to avoid application log duplicated in harvester_default.log file. Fixes #7895 (#7896) +- Fix typo in Messages.properties (#7892) +- Fix LinksApiTest integration test. 
Related to changes in #7878 to process the links in a thread +- [gn]: hibernate troubles with jeeves and persistance manager (harvesting mef) (#6840) +- Feedback / Fix email to multiple recipients (#7875) +- Passing key into update/remove process xslt for iso 19139 to fix issue with updating/deleting resources with same url (#7431) +- Metadata link analysis improvements (#7878) +- Indexing / Improve geometry indexing and display +- Editor / GeoNames search failing with space +- Fill in 4.2.x releases for maintenance page (#7879) +- Map / Extent API / Background image failure if matrixset is not SRS code +- Optimise query used to retrieve metadata links results (#7453) +- Standard / ISO19115-3 / Schema / Move from srv 2.1 to 2.0 +- fix regex for urls where ampersand follows the ID +- OpenID / Cleaning up cached tokens (#7810) +- Editor / Associated resources panel improvements (#7669) +- Update version to 4.4.4-SNAPSHOT \ No newline at end of file diff --git a/docs/manual/README.md b/docs/manual/README.md index 6a32791b9e9..d01313f1f47 100644 --- a/docs/manual/README.md +++ b/docs/manual/README.md @@ -23,6 +23,12 @@ If you are using Python3: ```bash pip3 install -r requirements.txt ``` + + In the future you can update using: + + ```bash + pip3 install -r requirements.txt -U + ``` 2. Use ***mkdocs** to preview locally: @@ -55,6 +61,13 @@ If you use a Python virtual environment: pip install -r requirements.txt ``` + In the future you can update with: + + ```bash + source venv/bin/activate + pip3 install -r requirements.txt + ``` + 2. Use ***mkdocs*** to preview from virtual environment: ```bash @@ -63,18 +76,16 @@ If you use a Python virtual environment: 3. Preview: http://localhost:8000 -### Docker - -If you are not familiar with Python the MkDocs-material website has instructions for Docker: - -1. Run MkDocs in Docker environment: - + Preview uses a single version, so expect some warnings from version chooser: ``` - docker pull squidfunk/mkdocs-material - docker run --rm -it -p 8000:8000 -v ${PWD}:/docs squidfunk/mkdocs-material + "GET /versions.json HTTP/1.1" code 404 ``` + +4. Optional: Preview online help: -2. Preview: http://localhost:8000 + ```bash + mkdocs serve --config-file help.yml + ``` ## Maven Integration @@ -96,34 +107,36 @@ If you are not familiar with Python the MkDocs-material website has instructions mvn install -Pdefault ``` -## Deploy +## Publish Documentation + +We use ``mike`` for publishing (from the `gh-pages` branch). Docs are published by the ``.github/workflows/docs.yml`` automation each time pull-request is merged. -We use ``mike`` for publishing to https://geonetwork.github.io using `.` version: +If you wish to preview using your own `gh-pages` branch: -1. To deploy docs from the `main` branch to website `gh-pages` branch: +1. To deploy 4.4 docs as latest from the `main` branch to website `gh-pages` branch: ```bash - mike deploy --push --no-redirect --update-aliases 4.4 latest + mike deploy --title "4.4 Latest" --alias-type=copy --update-aliases 4.4 latest ``` - -2. To deploy documentation for a new release: + +2. To deploy documentation for stable release: ```bash - mike deploy --push --no-redirect --update-aliases 4.2 stable + mike deploy --push --alias-type=copy 4.2 stable ``` 3. When starting a new branch you can make it the default: ```bash - mike set-default --push 4.2 + mike set-default --push 4.6 ``` - - Hint: When starting a new branch update `overview/changelog/` navigation tree also. 
+ + Hint: When starting a new branch update `overview/changelog/history/index.md` headings for latest, maintenance, stable (for prior branches also). 4. To publish documentation for a maintenance release: ```bash - mike deploy --push --no-redirect --update-aliases 3.12 maintenance + mike deploy --push --alias-type=copy 3.12 maintenance ``` 5. To show published versions: diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/metadata_history.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/metadata_history.png new file mode 100644 index 00000000000..a89837bfcbd Binary files /dev/null and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/metadata_history.png differ diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/img/metadata_history_config.png b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/metadata_history_config.png new file mode 100644 index 00000000000..f6a4860342a Binary files /dev/null and b/docs/manual/docs/administrator-guide/configuring-the-catalog/img/metadata_history_config.png differ diff --git a/docs/manual/docs/administrator-guide/configuring-the-catalog/portal-configuration.md b/docs/manual/docs/administrator-guide/configuring-the-catalog/portal-configuration.md index 6ef0fc230a5..817698f063b 100644 --- a/docs/manual/docs/administrator-guide/configuring-the-catalog/portal-configuration.md +++ b/docs/manual/docs/administrator-guide/configuring-the-catalog/portal-configuration.md @@ -44,6 +44,29 @@ The list of sub-portal available is at -m "" + ``` + + Use `git note remove` if you need to clear a note and start again: + ``` + git notes remove + ``` + + Preview changes using: + + ``` + git log --pretty='format:* %N' $previousversion... | grep -v "^* $" + ``` + + Save your notes: + ``` + git push origin refs/notes/commits + ``` + +2. Generate release notes: + + ```bash + ./release-notes + ``` + + After the script runs it will produces: + + * ``docs/changes/changes4.4.4-0.txt`` + + The last couple commits here can be removed (from the release steps above). + + * ``docs/manual/docs/overview/change-log/version-4.4.4.md`` + + This file can be updated based on highlights from: [milestone closed issues](https://github.com/geonetwork/core-geonetwork/pulls?q=is%3Apr+milestone%3A4.4.4+is%3Aclosed) + + Filter using: + + * label: `changelog` as Major Features + * label: api change + * label: `index structure change` as Index + * label: `bug` as Fixes + +3. Update the navigation: + + * ``docs/manual/mkdocs.yml`` + * ``docs/manual/docs/overview/change-log/latest/index.md`` + +### Build the release locally + +1. Use release build script: + + ```bash + ./release-build.sh + ``` + +2. Startup Elasticsearch + +3. Remove local database: + + ```bash + rm ~/gn.mv.db + rm ~/gn.trace.db + ``` + +4. Test the release: + + ```bash + ./release-test.sh + ``` + +5. Smoke Test: + + * Load ISO19139 samples and templates + * Display a record and try each of the views, and the XML download + * Use Contributor board to create a new record (from the "preferred" template) + * Try validation (validation errors are expected we just wish to check it runs) + * Try each of the editor views + +### Publish the release + +1. Publish + + ```bash + ./release-publish.sh + ``` + +2. Cleanup + + ```bash + ./release-restore.sh + ``` + +## Doing a manual release + This section documents the steps followed by the development team to do a new release. 
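The git-notes workflow that drives both the scripted release notes above and the manual steps below can be illustrated with a short hypothetical walk-through; the commit hash and note text are invented for illustration, and only the commands come from the steps themselves.

```bash
# Fetch any notes already pushed by other release managers
git fetch origin refs/notes/commits:refs/notes/commits

# Attach a change-log note to a commit (hash and wording are examples)
git notes append -m "Editor / Fix add element attribute (#7683)" 1a2b3c4d

# Preview the notes that will feed the change log since the previous release tag
git log --pretty='format:* %N' $previousversion... | grep -v "^* $"

# Share the notes so the release-notes script can pick them up
git push origin refs/notes/commits
```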
Once the release branch has been thoroughly tested and is stable a release can be made. @@ -56,9 +185,9 @@ with the following utilities: ***sed***, ***xmlstarlet*** and ***sftp***. 2. Prepare change-log notes. - Git notes are managed similar to push and pulling tags. Start by pulling the latest notes: + Git notes are managed in `ref/notes/commits` similar to push and pulling tags. Start by pulling the latest notes: ``` - git pull origin refs/notes/commits + git fetch origin refs/notes/commits:refs/notes/commits ``` Review changes along with any notes: @@ -91,7 +220,7 @@ with the following utilities: ***sed***, ***xmlstarlet*** and ***sftp***. 3. Create change log page: `docs/manual/docs/overview/change-log/` ``` shell - cat < docs/manual/docs/overview/changes/version-$newversion.md + cat < docs/manual/docs/overview/change-log/version-$version.md # Version $version GeoNetwork $version is a minor release. @@ -110,9 +239,9 @@ with the following utilities: ***sed***, ***xmlstarlet*** and ***sftp***. EOF - git log --pretty='format:* %N' $previousversion.. | grep -v "^* $" >> docs/manual/docs/overview/changes/version-$newversion.md + git log --pretty='format:* %N' $previousversion.. | grep -v "^* $" >> docs/manual/docs/overview/change-log/version-$version.md - cat < docs/manual/docs/overview/changes/version-$newversion.md + cat < docs/manual/docs/overview/change-log/version-$version.md and more \... see [$version issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A$version+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A$version+is%3Aclosed) for full details. EOF @@ -164,7 +293,7 @@ with the following utilities: ***sed***, ***xmlstarlet*** and ***sftp***. # Download Jetty and create the installer cd ../release - mvn clean install -Djetty-download,bundle + mvn clean install -Pjetty-download,bundle # Deploy to osgeo repository (requires credentials in ~/.m2/settings.xml) cd .. @@ -186,10 +315,12 @@ with the following utilities: ***sed***, ***xmlstarlet*** and ***sftp***. # Set version number to SNAPSHOT ./update-version.sh $newversion $nextversion + nextversionnosnapshot=${nextversion//[-SNAPSHOT]/} + # Add SQL migration step for the next version - mkdir web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v442 - cat < web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v442/migrate-default.sql - UPDATE Settings SET value='4.4.2' WHERE name='system/platform/version'; + mkdir web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v${nextversionnosnapshot//[.]/} + cat < web/src/main/webapp/WEB-INF/classes/setup/sql/migrate/v${nextversionnosnapshot//[.]/}/migrate-default.sql + UPDATE Settings SET value='${nextversionnosnapshot}' WHERE name='system/platform/version'; UPDATE Settings SET value='SNAPSHOT' WHERE name='system/platform/subVersion'; EOF vi web/src/main/webResources/WEB-INF/config-db/database_migration.xml @@ -231,7 +362,7 @@ with the following utilities: ***sed***, ***xmlstarlet*** and ***sftp***. 
``` shell md5 -r web/target/geonetwork.war > web/target/geonetwork.war.md5 - md5 -r release/target/GeoNetwork-$newversion/geonetwork-bundle-$newversion.zip > release/target/GeoNetwork-$newversion/geonetwork-bundle-$newversion.zip.md5 + md5 -r release/target/GeoNetwork-$version/geonetwork-bundle-$newversion.zip > release/target/GeoNetwork-$version/geonetwork-bundle-$newversion.zip.md5 ``` On sourceforge first: diff --git a/docs/manual/docs/help/record/index.md b/docs/manual/docs/help/record/index.md index 7b7b9c58a7a..df3b8cd7e52 100644 --- a/docs/manual/docs/help/record/index.md +++ b/docs/manual/docs/help/record/index.md @@ -194,7 +194,7 @@ Download the contents of a single record. - A folder containing complete **`metadata.xml`** record, and simplified ***`metadata-iso19139.xml`*** record. - ***`index.html`** and **`index.csv`** summary described in - the [previous section](#download-from-search-results). + the [previous section](../search/index.md#download-from-search-results). ![](img/export_record_zip.png) *Export (ZIP) index.html summary* diff --git a/docs/manual/docs/maintainer-guide/updating/index.md b/docs/manual/docs/maintainer-guide/updating/index.md index 48cb0ac2b92..a7c56978ee6 100644 --- a/docs/manual/docs/maintainer-guide/updating/index.md +++ b/docs/manual/docs/maintainer-guide/updating/index.md @@ -99,7 +99,7 @@ Update: Guidance: -* [Changelog 4.4.x](../../overview/change-log/latest/index.md) +* [Changelog 4.4](../../overview/change-log/history/index.md#44) * No additional guidance provided at this time. ## Upgrade from GeoNetwork 3.0 to GeoNetwork 4.4 @@ -161,5 +161,6 @@ Guidance: * For information on new features and functionality: - * [Changelog 4.4.x](../../overview/change-log/latest/index.md) - * [Changelog 4.0.x](../../overview/change-log/archive/index.md#40x) + * [Changelog 4.4](../../overview/change-log/history/index.md#44) + * [Changelog 4.2](../../overview/change-log/history/index.md#42) + * [Changelog 4.0](../../overview/change-log/history/index.md#40) diff --git a/docs/manual/docs/overview/change-log/archive/index.md b/docs/manual/docs/overview/change-log/history/index.md similarity index 55% rename from docs/manual/docs/overview/change-log/archive/index.md rename to docs/manual/docs/overview/change-log/history/index.md index 3c9dd91e5f5..298f544c401 100644 --- a/docs/manual/docs/overview/change-log/archive/index.md +++ b/docs/manual/docs/overview/change-log/history/index.md @@ -1,12 +1,59 @@ -# Archived +# Release History -The following series of GeoNetwork are no longer in active use and you are encouraged to migrate to a newer version. - -The GeoNetwork community is not large enough to maintain many active branches of GeoNetwork at one time. Older installations of GeoNetwork may be subject to disclosed security vulnerabilities resulting in our recommendation to update to the supported [stable](../stable/index.md) or [maintenance](../maintenance/index.md) series. +The GeoNetwork community is not large enough to maintain many active branches of GeoNetwork at one time. Older installations of GeoNetwork may be subject to disclosed security vulnerabilities resulting in our recommendation to update to the supported [stable](#stable-release) or [maintenance](#maintenance-release) series listed below. Volunteers wishing to backport security fixes to older versions of GeoNetwork are welcome to do so. Commercial support providers are welcome to do so on behalf of their customers. 
- -## 4.0.x +## Latest Release + +The latest release of GeoNetwork is recommended for those enjoying the newest features from the GeoNetwork community. + +This series is under **active development** by our community, with new features, improvements, documentation updates, bug reports, fixes, and releases. + +### 4.4 + +- [Version 4.4.4](../version-4.4.4.md) +- [Version 4.4.3](../version-4.4.3.md) +- [Version 4.4.2](../version-4.4.2.md) +- [Version 4.4.1](../version-4.4.1.md) +- [Version 4.4.0](../version-4.4.0.md) + +## Stable Release + +The stable release of GeoNetwork is recommended for production use and for new installations of GeoNetwork. + +This series is under **active use** by our community, with regular improvements, documentation updates, bug reports, fixes, and releases. + +### 4.2 + +- [Version 4.2.9](../version-4.2.9.md) +- [Version 4.2.8](../version-4.2.8.md) +- [Version 4.2.7](../version-4.2.7.md) +- [Version 4.2.6](../version-4.2.6.md) +- [Version 4.2.5](../version-4.2.5.md) +- [Version 4.2.4](../version-4.2.4.md) +- [Version 4.2.3](../version-4.2.3.md) +- [Version 4.2.2](../version-4.2.2.md) +- [Version 4.2.1](../version-4.2.1.md) +- [Version 4.2.0](../version-4.2.0.md) + +## Maintenance Release + +The maintenance release of GeoNetwork provides production systems with essential updates and fixes to allow time to upgrade to the recommended stable release. + +When available, the maintenance series provides **time to upgrade** for our community - limited to providing essential fixes only. + +### No maintenance release + +With the final release of 3.12.12 there is no active maintenance release being provided. + +Production systems are advised to update to 4.2 above if they have not already done so. + +## Archived Releases + +The following series of GeoNetwork are no longer in active use and you are encouraged to migrate to a newer version.
+ +### 4.0 - [Version 4.0.6](../version-4.0.6.md) - [Version 4.0.5](../version-4.0.5.md) @@ -18,7 +65,23 @@ Volunteers wishing to backport security fixes to older versions of GeoNetwork ar - [Version 4.0.0 Alpha.2](../version-4.0.0-alpha.2.md) - [Version 4.0.0 Alpha.1](../version-4.0.0-alpha.1.md) -## 3.10.x +### 3.12 + +- [Version 3.12.12](../version-3.12.12.md) +- [Version 3.12.11](../version-3.12.11.md) +- [Version 3.12.10](../version-3.12.10.md) +- [Version 3.12.9](../version-3.12.9.md) +- [Version 3.12.8](../version-3.12.8.md) +- [Version 3.12.7](../version-3.12.7.md) +- [Version 3.12.6](../version-3.12.6.md) +- [Version 3.12.5](../version-3.12.5.md) +- [Version 3.12.4](../version-3.12.4.md) +- [Version 3.12.3](../version-3.12.3.md) +- [Version 3.12.3](../version-3.12.2.md) +- [Version 3.12.1](../version-3.12.1.md) +- [Version 3.12.0](../version-3.12.0.md) + +### 3.10 - [Version 3.10.10](../version-3.10.10.md) - [Version 3.10.9](../version-3.10.9.md) @@ -50,18 +113,18 @@ Volunteers wishing to backport security fixes to older versions of GeoNetwork ar - [Version 3.0.1](../version-3.0.1.md) - [Version 3.0.0](../version-3.0.0.md) -## 3.8.x +### 3.8 - [Version 3.8.3](../version-3.8.3.md) - [Version 3.8.2](../version-3.8.2.md) - [Version 3.8.1](../version-3.8.1.md) - [Version 3.8.0](../version-3.8.0.md) -## 3.6.x +### 3.6 - [Version 3.6.0](../version-3.6.0.md) -## 3.4.x +### 3.4 - [Version 3.4.4](../version-3.4.4.md) - [Version 3.4.3](../version-3.4.3.md) @@ -69,13 +132,13 @@ Volunteers wishing to backport security fixes to older versions of GeoNetwork ar - [Version 3.4.1](../version-3.4.1.md) - [Version 3.4.0](../version-3.4.0.md) -## 3.2.x +### 3.2 - [Version 3.2.2](../version-3.2.2.md) - [Version 3.2.1](../version-3.2.1.md) - [Version 3.2.0](../version-3.2.0.md) -## 3.0.x +### 3.0 - [Version 3.0.4](../version-3.0.4.md) - [Version 3.0.3](../version-3.0.3.md) @@ -83,7 +146,7 @@ Volunteers wishing to backport security fixes to older versions of GeoNetwork ar - [Version 3.0.1](../version-3.0.1.md) - [Version 3.0.0](../version-3.0.0.md) -## 2.10.x +### 2.10 All development has ceased: diff --git a/docs/manual/docs/overview/change-log/index.md b/docs/manual/docs/overview/change-log/index.md index 83a627c6074..8b8a2296939 100644 --- a/docs/manual/docs/overview/change-log/index.md +++ b/docs/manual/docs/overview/change-log/index.md @@ -2,7 +2,6 @@ Notable changes made to GeoNetwork opensource including new features, migration instructions, and bug fixes. -- [Latest](latest/index.md) -- [Stable](stable/index.md) -- [Maintenance](maintenance/index.md) -- [Archive](archive/index.md) +- [Version 4.4.4](version-4.4.4.md) +- [Version 4.2.9](version-4.2.9.md) +- [Release History](history/index.md) diff --git a/docs/manual/docs/overview/change-log/latest/index.md b/docs/manual/docs/overview/change-log/latest/index.md deleted file mode 100644 index d4306639288..00000000000 --- a/docs/manual/docs/overview/change-log/latest/index.md +++ /dev/null @@ -1,14 +0,0 @@ -# Latest - -GeoNetwork 4.4.x is recommended for those enjoying the newest features from the GeoNetwork community. - -This series is under active development by our community, with new features, improvements, documentation updates, bug reports, fixes, and releases. 
- -## Latest - -- [Version 4.4.2](../version-4.4.2.md) - -## History - -- [Version 4.4.1](../version-4.4.1.md) -- [Version 4.4.0](../version-4.4.0.md) diff --git a/docs/manual/docs/overview/change-log/maintenance/index.md b/docs/manual/docs/overview/change-log/maintenance/index.md deleted file mode 100644 index a7939f78428..00000000000 --- a/docs/manual/docs/overview/change-log/maintenance/index.md +++ /dev/null @@ -1,22 +0,0 @@ -# Maintenance - -The GeoNetwork 4.2.x series is stable and recommended for production use and new installations of GeoNetwork. -This series is under active use by our community, with regular improvements, documentation updates, bug reports, fixes, and releases. - -## Latest - -* [Version 3.12.11](../version-3.12.11.md) - -## History - -* [Version 3.12.10](../version-3.12.10.md) -* [Version 3.12.9](../version-3.12.9.md) -* [Version 3.12.8](../version-3.12.8.md) -* [Version 3.12.7](../version-3.12.7.md) -* [Version 3.12.6](../version-3.12.6.md) -* [Version 3.12.5](../version-3.12.5.md) -* [Version 3.12.4](../version-3.12.4.md) -* [Version 3.12.3](../version-3.12.3.md) -* [Version 3.12.3](../version-3.12.2.md) -* [Version 3.12.1](../version-3.12.1.md) -* [Version 3.12.0](../version-3.12.0.md) diff --git a/docs/manual/docs/overview/change-log/stable/index.md b/docs/manual/docs/overview/change-log/stable/index.md deleted file mode 100644 index e1e8d081652..00000000000 --- a/docs/manual/docs/overview/change-log/stable/index.md +++ /dev/null @@ -1,20 +0,0 @@ -# Stable - -The GeoNetwork 4.2.x series is stable and recommended for production use and new installations of GeoNetwork. -This series is under active use by our community, with regular improvements, documentation updates, bug reports, fixes, and releases. - -## Latest - -- [Version 4.2.8](../version-4.2.8.md) - - -## History - -- [Version 4.2.7](../version-4.2.7.md) -- [Version 4.2.6](../version-4.2.6.md) -- [Version 4.2.5](../version-4.2.5.md) -- [Version 4.2.4](../version-4.2.4.md) -- [Version 4.2.3](../version-4.2.3.md) -- [Version 4.2.2](../version-4.2.2.md) -- [Version 4.2.1](../version-4.2.1.md) -- [Version 4.2.0](../version-4.2.0.md) diff --git a/docs/manual/docs/overview/change-log/version-3.0.0.md b/docs/manual/docs/overview/change-log/version-3.0.0.md index 3a35f9b542d..94d9df24f0a 100644 --- a/docs/manual/docs/overview/change-log/version-3.0.0.md +++ b/docs/manual/docs/overview/change-log/version-3.0.0.md @@ -1 +1,14 @@ # Version 3.0.0 {#version-300} + +## New features + +This is a major release changing dropping support for ExtJS UI completing transition to AngularJS. + +* ISO19139 / INSPIRE conformity +* Feature/harvester admin 2 +* Map: Make add WMS more robust +* Disable ExtUI + +## Bug Fixes + +Please see [3.0.0 issues](https://github.com/geonetwork/core-geonetwork/issues?page=2&q=is%3Aissue+milestone%3A3.0.0+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?q=milestone%3A3.0.0+is%3Aclosed+is%3Apr) for full details. diff --git a/docs/manual/docs/overview/change-log/version-3.12.12.md b/docs/manual/docs/overview/change-log/version-3.12.12.md new file mode 100644 index 00000000000..79c6d07c10f --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-3.12.12.md @@ -0,0 +1,24 @@ +# Version 3.12.12 + +GeoNetwork 3.12.12 is a minor release. 
+ +## List of changes + +- Metadata + - [BP] [Fix exception handling from schematron validation so that it flags the metadata as invalid if there is an exception](https://github.com/geonetwork/core-geonetwork/pull/6978) + - [BP] [Overview not shown in PDF export when the overview image is stored in GeoNetwork and requires authentication to access it](https://github.com/geonetwork/core-geonetwork/pull/7556) + +- Administration + - [BP] [Harvesters / Reset harvester history pagination when selecting a harvester](https://github.com/geonetwork/core-geonetwork/pull/7836) + - [BP] [GeoNetwork harvester / Check if a resource exists to save it, instead of trying to retrieve the file details, to avoid confusing NoSuchFileException exception](https://github.com/geonetwork/core-geonetwork/pull/7846) + +- Other + - [BP] [Fix cookies path when deployed on root "/" context](https://github.com/geonetwork/core-geonetwork/pull/7446) + - [BP] [Remove exception class name from the error message](https://github.com/geonetwork/core-geonetwork/pull/6977) + - Update `org.json:json` from version 20140107 to 20240205 + - Update `commons-fileupload` from version 1.3.3 to 1.5 + - Documentation / Manual improvements + +and more \... see [3.12.12 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A3.12.12+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A3.12.12+is%3Aclosed) for full details. + +**Full Changelog**: [here](https://github.com/geonetwork/core-geonetwork/compare/3.12.11...3.12.12) diff --git a/docs/manual/docs/overview/change-log/version-3.4.1.md b/docs/manual/docs/overview/change-log/version-3.4.1.md index a02b3854b05..5ecc6f0eb09 100644 --- a/docs/manual/docs/overview/change-log/version-3.4.1.md +++ b/docs/manual/docs/overview/change-log/version-3.4.1.md @@ -1,6 +1,10 @@ # Version 3.4.1 {#version-341} -## New features +## New features / enhancements + +* New metadata page / sort template by title +* Create record from template having categories should copy categories +* Import / Add option to publish to all ## Removed features diff --git a/docs/manual/docs/overview/change-log/version-4.2.9.md b/docs/manual/docs/overview/change-log/version-4.2.9.md new file mode 100644 index 00000000000..875c88c9d5e --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.2.9.md @@ -0,0 +1,18 @@ +# Version 4.2.9 {#version-429} + +GeoNetwork 4.2.9 release is a minor release. + +## List of changes + +Major changes: + +- [Create a metadata / Add dynamic and download privileges to the users in the same group](https://github.com/geonetwork/core-geonetwork/pull/7744) +- [Decouple metadata user feedback from metadata rating feature](https://github.com/geonetwork/core-geonetwork/pull/7796) +- [Extend http proxy to manage duplicated parameters](https://github.com/geonetwork/core-geonetwork/pull/7854) +- [Fix MIME-types on attachments](https://github.com/geonetwork/core-geonetwork/pull/7675) +- [Fix pdf link to the application website](https://github.com/geonetwork/core-geonetwork/pull/7681) +- Update `org.json:json` from version 20140107 to 20240205 +- Documentation / Manual improvements +- Documentation / API SpringDoc fixes + +and more \... see [4.2.9 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.2.9+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.2.9+is%3Aclosed) for full details.
diff --git a/docs/manual/docs/overview/change-log/version-4.4.0.md b/docs/manual/docs/overview/change-log/version-4.4.0.md index f2142230d3a..15c3fe144d8 100644 --- a/docs/manual/docs/overview/change-log/version-4.4.0.md +++ b/docs/manual/docs/overview/change-log/version-4.4.0.md @@ -4,6 +4,8 @@ GeoNetwork 4.4.0 release is a major release. ## Migration notes +When migrating from the 4.2.x series: + ### Java **Version 4.4 only works on Java 11.** diff --git a/docs/manual/docs/overview/change-log/version-4.4.1.md b/docs/manual/docs/overview/change-log/version-4.4.1.md index 46a226dad58..ba97da38ec5 100644 --- a/docs/manual/docs/overview/change-log/version-4.4.1.md +++ b/docs/manual/docs/overview/change-log/version-4.4.1.md @@ -2,15 +2,13 @@ GeoNetwork 4.4.1 release is a minor release. -## Migration notes +## Update notes -### Java - -**Version 4.4 only works on Java 11.** +When updating please review the following actions: ### Index changes -After update, don't forget to go to admin console --> tools --> Delete index and reindex. +After updating use **Admin Console > Tools** and use **Delete index and reindex**. ## List of changes diff --git a/docs/manual/docs/overview/change-log/version-4.4.2.md b/docs/manual/docs/overview/change-log/version-4.4.2.md index 807950b6642..11864f8b524 100644 --- a/docs/manual/docs/overview/change-log/version-4.4.2.md +++ b/docs/manual/docs/overview/change-log/version-4.4.2.md @@ -2,15 +2,13 @@ GeoNetwork 4.4.2 release is a minor release. -## Migration notes +## Update notes -### Java - -**Version 4.4 only works on Java 11.** +When updating please review the following actions: ### Index changes -After update, don't forget to go to admin console --> tools --> Delete index and reindex. +After updating use **Admin Console > Tools** and use **Delete index and reindex**. ## List of changes diff --git a/docs/manual/docs/overview/change-log/version-4.4.3.md b/docs/manual/docs/overview/change-log/version-4.4.3.md new file mode 100644 index 00000000000..1b38da471dd --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.3.md @@ -0,0 +1,44 @@ +# Version 4.4.3 {#version-443} + +GeoNetwork 4.4.3 release is a minor release. + +## Update notes + +When updating please review the following actions: + +### Index changes + +This version uses the Elasticsearch version 8 Java client, so an Elasticsearch version 8 server is recommended. +However, versions 7.15+ and 8+ have been tested. + +After updating use **Admin Console > Tools** and use **Delete index and reindex**. + +### Map + +[Stamen background layers are not available, update your maps](https://github.com/geonetwork/core-geonetwork/pull/7715).
+ + +## List of changes + +Major changes: + +- [Elasticsearch 8 upgrade](https://github.com/geonetwork/core-geonetwork/pull/7599) +- [Editor / Distribution panel improvements](https://github.com/geonetwork/core-geonetwork/pull/7468) +- [Thesaurus / Add support for codelist described using SDMX](https://github.com/geonetwork/core-geonetwork/pull/7790) +- [Thesaurus / Add support for thesaurus described using OWL format](https://github.com/geonetwork/core-geonetwork/pull/7674) +- [Thesaurus / Improve support of EU publication office SKOS format](https://github.com/geonetwork/core-geonetwork/pull/7673) +- [INSPIRE / Add testsuite for IACS](https://github.com/geonetwork/core-geonetwork/pull/7756) +- [Map viewer / Remove Stamen background layers - no longer available](https://github.com/geonetwork/core-geonetwork/pull/7715) +- [i18n / Add Welsh language for user interface](https://github.com/geonetwork/core-geonetwork/pull/7851) +- [Index / Add Danish language configuration](https://github.com/geonetwork/core-geonetwork/pull/7697) +- [Index / Translated the index warnings and errors](https://github.com/geonetwork/core-geonetwork/pull/7531) +- [Create a metadata / Add dynamic and download privileges to the users in the same group](https://github.com/geonetwork/core-geonetwork/pull/7744) +- [Decouple metadata user feedback from metadata rating feature](https://github.com/geonetwork/core-geonetwork/pull/7796) +- [Extend http proxy to manage duplicated parameters](https://github.com/geonetwork/core-geonetwork/pull/7854) +- [Fix MIME-types on attachments](https://github.com/geonetwork/core-geonetwork/pull/7675) +- [Fix pdf link to the application website](https://github.com/geonetwork/core-geonetwork/pull/7681) +- Update `org.json:json` from version 20140107 to 20240205 +- Documentation / Manual improvements +- Documentation / API SpringDoc fixes + +and more \... see [4.4.3 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.3+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.3+is%3Aclosed) for full details. diff --git a/docs/manual/docs/overview/change-log/version-4.4.4.md b/docs/manual/docs/overview/change-log/version-4.4.4.md new file mode 100644 index 00000000000..7a6b27c37c8 --- /dev/null +++ b/docs/manual/docs/overview/change-log/version-4.4.4.md @@ -0,0 +1,29 @@ +# Version 4.4.4 + +GeoNetwork 4.4.4 is a minor release. + +## Update notes + +When updating, please review the following actions: + +### Index changes + +After updating, open **Admin Console > Tools** and use **Delete index and reindex**: + +* [Passing key into update/remove process xslt for iso 19139 to fix issue with updating/deleting resources with same url](https://github.com/geonetwork/core-geonetwork/pull/7431) + +## List of changes + +Major changes: + +* [CSW / GetRecords / Number of matches in page info](https://github.com/geonetwork/core-geonetwork/pull/7937) + +* [Editor associated resources panel is redesigned with a new user interface to link to external resources (DOI and URL to external catalogue)](https://github.com/geonetwork/core-geonetwork/pull/7669) + +Fixes: + +* [Search Export CSV - Escape double-quotes with double-quotes instead of backslash](https://github.com/geonetwork/core-geonetwork/pull/7927) + +* [Metadata feedback / Fix email to multiple recipients](https://github.com/geonetwork/core-geonetwork/pull/7875) + +and more \... 
see [4.4.4-0 issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A4.4.4+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A4.4.4+is%3Aclosed) for full details. diff --git a/docs/manual/docs/user-guide/associating-resources/doi.md b/docs/manual/docs/user-guide/associating-resources/doi.md index 31434831688..cabc05ecba5 100644 --- a/docs/manual/docs/user-guide/associating-resources/doi.md +++ b/docs/manual/docs/user-guide/associating-resources/doi.md @@ -15,7 +15,13 @@ A record can be downloaded using the DataCite format from the API using: Click here to go to stable. +{% endblock %} diff --git a/docs/manual/pom.xml b/docs/manual/pom.xml index 68654e27401..44d232849a4 100644 --- a/docs/manual/pom.xml +++ b/docs/manual/pom.xml @@ -27,7 +27,7 @@ gn-docs org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 gn-guide diff --git a/docs/pom.xml b/docs/pom.xml index e1c5e3b5c5c..a2415b55dd4 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 gn-docs diff --git a/doi/pom.xml b/doi/pom.xml index e0fb8dcbf9f..eec0b530cfe 100644 --- a/doi/pom.xml +++ b/doi/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java b/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java index 46cb4ab7b99..934895b2cdd 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/BaseDoiClient.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -94,14 +94,22 @@ protected void create(String url, String body, String contentType, url, body, status, httpResponse.getStatusText(), responseBody); Log.info(LOGGER_NAME, message); - throw new DoiClientException(message); + throw new DoiClientException(String.format( + "Error creating DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } else { Log.info(LOGGER_NAME, String.format( successMessage, url)); } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error creating DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{ex.getMessage()}); } finally { if (postMethod != null) { @@ -139,13 +147,24 @@ protected String retrieve(String url) } else { Log.info(LOGGER_NAME, "Retrieve DOI metadata end -- Error: " + httpResponse.getStatusText()); - throw new DoiClientException( httpResponse.getStatusText() + - CharStreams.toString(new InputStreamReader(httpResponse.getBody()))); + String message = httpResponse.getStatusText() + + CharStreams.toString(new InputStreamReader(httpResponse.getBody())); + + throw new DoiClientException(String.format( + "Error retrieving DOI: %s", + message)) + 
.withMessageKey("exception.doi.serverErrorRetrieve") + .withDescriptionKey("exception.doi.serverErrorRetrieve.description", new String[]{message}); + } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error retrieving DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorRetrieve") + .withDescriptionKey("exception.doi.serverErrorRetrieve.description", new String[]{ex.getMessage()}); } finally { if (getMethod != null) { diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java index 589d7f137a9..73317a4b122 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiDataciteClient.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2023 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -24,8 +24,6 @@ import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.io.IOUtils; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.methods.HttpDelete; import org.fao.geonet.ApplicationContextHolder; import org.fao.geonet.utils.GeonetHttpRequestFactory; @@ -179,14 +177,24 @@ public void deleteDoiMetadata(String doi) if ((status != HttpStatus.SC_NOT_FOUND) && (status != HttpStatus.SC_OK)) { Log.info(LOGGER_NAME, "Delete DOI metadata end -- Error: " + httpResponse.getStatusText()); - throw new DoiClientException( httpResponse.getStatusText() ); + String message = httpResponse.getStatusText(); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{message}); } else { Log.info(LOGGER_NAME, "DeleteDOI metadata end"); } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{ex.getMessage()}); } finally { if (deleteMethod != null) { @@ -219,14 +227,25 @@ public void deleteDoi(String doi) if ((status != HttpStatus.SC_NOT_FOUND) && (status != HttpStatus.SC_OK)) { Log.info(LOGGER_NAME, "Delete DOI end -- Error: " + httpResponse.getStatusText()); - throw new DoiClientException( httpResponse.getStatusText() ); + String message = httpResponse.getStatusText(); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{message}); } else { Log.info(LOGGER_NAME, "DeleteDOI end"); } } catch (Exception ex) { Log.error(LOGGER_NAME, " -- Error (exception): " + ex.getMessage(), ex); - throw new DoiClientException(ex.getMessage()); + + throw new DoiClientException(String.format( + "Error deleting DOI: 
%s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{ex.getMessage()}); } finally { if (deleteMethod != null) { diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java index 87871c21d72..c21d0f3c4a0 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiManager.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2010 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -475,7 +475,11 @@ public void unregisterDoi(AbstractMetadata metadata, ServiceContext context) thr dm.updateMetadata(context, metadata.getId() + "", recordWithoutDoi, false, true, context.getLanguage(), new ISODate().toString(), true, IndexingMode.full); } catch (Exception ex) { - throw new DoiClientException(ex.getMessage()); + throw new DoiClientException(String.format( + "Error unregistering DOI: %s", + ex.getMessage())) + .withMessageKey("exception.doi.serverErrorUnregister") + .withDescriptionKey("exception.doi.serverErrorUnregister.description", new String[]{ex.getMessage()}); } } @@ -487,8 +491,14 @@ public Element setDOIValue(String doi, String schema, Element md) throws Excepti Path styleSheet = dm.getSchemaDir(schema).resolve(DOI_ADD_XSL_PROCESS); boolean exists = Files.exists(styleSheet); if (!exists) { - throw new DoiClientException(String.format("To create a DOI, the schema has to defined how to insert a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", - schema, DOI_ADD_XSL_PROCESS)); + String message = String.format("To create a DOI, the schema has to defined how to insert a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", + schema, DOI_ADD_XSL_PROCESS); + + throw new DoiClientException(String.format( + "Error creating DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } String doiPublicUrl = client.createPublicUrl(""); @@ -507,8 +517,15 @@ public Element removeDOIValue(String doi, String schema, Element md) throws Exce Path styleSheet = dm.getSchemaDir(schema).resolve(DOI_REMOVE_XSL_PROCESS); boolean exists = Files.exists(styleSheet); if (!exists) { - throw new DoiClientException(String.format("To remove a DOI, the schema has to defined how to remove a DOI in the record. The schema_plugins/%s/process/%s was not found. Create the XSL transformation.", - schema, DOI_REMOVE_XSL_PROCESS)); + String message = String.format("To remove a DOI, the schema has to defined how to remove a DOI in the record. The schema_plugins/%s/process/%s was not found. 
Create the XSL transformation.", + schema, DOI_REMOVE_XSL_PROCESS); + + throw new DoiClientException(String.format( + "Error deleting DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorDelete") + .withDescriptionKey("exception.doi.serverErrorDelete.description", new String[]{message}); + } Map params = new HashMap<>(1); @@ -528,8 +545,14 @@ private Element convertXmlToDataCiteFormat(String schema, Element md, String doi isMedra ? DATACITE_MEDRA_XSL_CONVERSION_FILE : DATACITE_XSL_CONVERSION_FILE); final boolean exists = Files.exists(styleSheet); if (!exists) { - throw new DoiClientException(String.format("To create a DOI, the record needs to be converted to the DataCite format (https://schema.datacite.org/). You need to create a formatter for this in schema_plugins/%s/%s. If the standard is a profile of ISO19139, you can simply point to the ISO19139 formatter.", - schema, DATACITE_XSL_CONVERSION_FILE)); + String message = String.format("To create a DOI, the record needs to be converted to the DataCite format (https://schema.datacite.org/). You need to create a formatter for this in schema_plugins/%s/%s. If the standard is a profile of ISO19139, you can simply point to the ISO19139 formatter.", + schema, DATACITE_XSL_CONVERSION_FILE); + + throw new DoiClientException(String.format( + "Error creating DOI: %s", + message)) + .withMessageKey("exception.doi.serverErrorCreate") + .withDescriptionKey("exception.doi.serverErrorCreate.description", new String[]{message}); } Map params = new HashMap<>(); @@ -539,7 +562,9 @@ private Element convertXmlToDataCiteFormat(String schema, Element md, String doi private void checkInitialised() throws DoiClientException { if (!initialised) { - throw new DoiClientException("DOI configuration is not complete. Check System Configuration and set the DOI configuration."); + throw new DoiClientException("DOI configuration is not complete. 
Check System Configuration and set the DOI configuration.") + .withMessageKey("exception.doi.configurationMissing") + .withDescriptionKey("exception.doi.configurationMissing.description", new String[]{}); } } diff --git a/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java b/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java index 823545decfa..fd7f7b2699e 100644 --- a/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java +++ b/doi/src/main/java/org/fao/geonet/doi/client/DoiMedraClient.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2010 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -58,7 +58,10 @@ public String retrieveDoi(String doi) throws DoiClientException { @Override public String retrieveAllDoi(String doi) throws DoiClientException { - throw new DoiClientException(MEDRA_NOT_SUPPORTED_EXCEPTION_MESSAGE); + throw new DoiClientException(MEDRA_NOT_SUPPORTED_EXCEPTION_MESSAGE) + .withMessageKey("exception.doi.operationNotSupported") + .withDescriptionKey("exception.doi.operationNotSupported.description", + new String[]{ MEDRA_NOT_SUPPORTED_EXCEPTION_MESSAGE }); } /** diff --git a/domain/pom.xml b/domain/pom.xml index 48d5cd34fa4..152be2c0add 100644 --- a/domain/pom.xml +++ b/domain/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java b/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java index 280b24dc2a8..765b55b7b9c 100644 --- a/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java +++ b/domain/src/main/java/org/fao/geonet/repository/GeonetRepositoryImpl.java @@ -110,6 +110,7 @@ protected static Element findAllAsXml(EntityManager ent return rootEl; } + @Transactional public T update(ID id, Updater updater) { final T entity = _entityManager.find(this._entityClass, id); diff --git a/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java b/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java index eb5cb852a9b..ef6f510df98 100644 --- a/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java +++ b/domain/src/main/java/org/fao/geonet/repository/LinkRepository.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -27,8 +27,8 @@ import org.springframework.data.jpa.repository.JpaSpecificationExecutor; import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.List; +import java.util.Optional; /** * Data Access object for the {@link Link} entities. @@ -39,9 +39,7 @@ public interface LinkRepository extends GeonetRepository, LinkRep * * @return one link or null. 
*/ - @Nullable - Link findOneByUrl(@Nonnull String url); + Optional findOneByUrl(@Nonnull String url); - @Nullable List findAllByUrlIn(@Nonnull List url); } diff --git a/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java b/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java index 6b723f8eaf9..cf53f7fb36b 100644 --- a/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java +++ b/domain/src/main/java/org/fao/geonet/repository/MetadataValidationRepositoryCustom.java @@ -51,7 +51,7 @@ public interface MetadataValidationRepositoryCustom { * @param metadataId the id of the metadata. * @return the number of rows deleted */ - @Modifying(clearAutomatically=true) + @Modifying(flushAutomatically = true, clearAutomatically = true) @Transactional @Query(value="DELETE FROM MetadataValidation v where v.id.metadataId = ?1 AND valtype != 'inspire'") int deleteAllInternalValidationById_MetadataId(Integer metadataId); diff --git a/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java b/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java index cd77c680549..8ddfc576891 100644 --- a/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java +++ b/domain/src/main/java/org/fao/geonet/repository/specification/LinkSpecs.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2016 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -23,31 +23,15 @@ package org.fao.geonet.repository.specification; -import org.fao.geonet.domain.Link; -import org.fao.geonet.domain.Link_; -import org.fao.geonet.domain.Metadata; -import org.fao.geonet.domain.MetadataLink; -import org.fao.geonet.domain.MetadataLink_; -import org.fao.geonet.domain.MetadataSourceInfo_; -import org.fao.geonet.domain.Metadata_; -import org.fao.geonet.domain.OperationAllowed; -import org.fao.geonet.domain.OperationAllowedId_; -import org.fao.geonet.domain.OperationAllowed_; -import org.fao.geonet.domain.ReservedGroup; -import org.fao.geonet.domain.ReservedOperation; +import com.google.common.collect.Sets; +import org.fao.geonet.domain.*; import org.springframework.data.jpa.domain.Specification; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Join; -import javax.persistence.criteria.JoinType; -import javax.persistence.criteria.Path; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; -import javax.persistence.criteria.Subquery; +import javax.persistence.criteria.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Set; public class LinkSpecs { private LinkSpecs() { @@ -58,92 +42,126 @@ public static Specification filter(String urlPartToContain, List associatedRecords, Integer[] groupPublishedIds, Integer[] groupOwnerIds, + Integer[] httpStatusValueFilter, + boolean excludeHarvestedMetadataFilter, Integer[] editingGroupIds) { - return new Specification() { - @Override - public Predicate toPredicate(Root root, CriteriaQuery query, CriteriaBuilder cb) { - List predicates = new ArrayList<>(); + return (root, query, cb) -> { + List predicates = new ArrayList<>(); - if (state != null) { - Path statePath = root.get(Link_.lastState); - predicates.add(cb.equal(statePath, 
state)); - } + if (state != null) { + Path statePath = root.get(Link_.lastState); + predicates.add(cb.equal(statePath, state)); + } - if (urlPartToContain != null) { - Path urlPath = root.get(Link_.url); - predicates.add( - cb.like(urlPath, - cb.literal(String.format("%%%s%%", urlPartToContain)))); - } + if (urlPartToContain != null) { + Path urlPath = root.get(Link_.url); + predicates.add( + cb.like(urlPath, + cb.literal(String.format("%%%s%%", urlPartToContain)))); + } + + if (associatedRecords != null) { + Join metadataJoin = root.join(Link_.records, JoinType.INNER); + predicates.add(metadataJoin.get("metadataUuid").in(associatedRecords)); + } + + if (excludeHarvestedMetadataFilter) { + Join metadataJoin = root.join(Link_.records, JoinType.INNER); - if (associatedRecords != null) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); - predicates.add(metadataJoin.get("metadataUuid").in(associatedRecords)); + Subquery subquery = query.subquery(Integer.class); + final Root metadataRoot = subquery.from(Metadata.class); + Path isHarvestedAttributePath = metadataRoot.get(AbstractMetadata_.harvestInfo).get(MetadataHarvestInfo_.harvested_JPAWorkaround); + Predicate equalHarvestPredicate = cb.equal(isHarvestedAttributePath, cb.literal(Constants.toYN_EnabledChar(false))); + subquery.where( + equalHarvestPredicate); + + Path metadataId = metadataRoot.get(AbstractMetadata_.id); + subquery.select(metadataId); + + predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); + query.distinct(true); + } + + if (httpStatusValueFilter != null && httpStatusValueFilter.length > 0) { + Join linkLinkStatusJoin = root.join(Link_.linkStatus, JoinType.LEFT); + + Integer[] valuesIn = Arrays.stream(httpStatusValueFilter).filter(i -> i >= 0).toArray(Integer[]::new); + Set setValuesNotIn = Sets.newHashSet(httpStatusValueFilter); + setValuesNotIn.removeAll(Arrays.asList(valuesIn)); + Integer[] valuesNotIn = setValuesNotIn.stream().map(i -> -1 * i).toArray(Integer[]::new); + + if (valuesIn.length > 0) { + predicates.add(cb.and( + cb.equal(linkLinkStatusJoin.get(LinkStatus_.checkDate), root.get(Link_.lastCheck)), + linkLinkStatusJoin.get((LinkStatus_.statusValue)).in(Arrays.asList( + Arrays.stream(valuesIn).map(String::valueOf).toArray())))); } - if (editingGroupIds != null && editingGroupIds.length > 0) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); - - Subquery subquery = query.subquery(Integer.class); - final Root opAllowRoot = subquery.from(OperationAllowed.class); - final Root metadataRoot = subquery.from(Metadata.class); - final Predicate groupOwnerPredicate = - metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(editingGroupIds); - final Predicate metadataOperations = cb.equal(metadataRoot.get(Metadata_.id), opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId)); - Predicate editableGroups = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(Arrays.asList(editingGroupIds)); - Predicate operationTypeEdit = - cb.equal( - opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.operationId), - cb.literal(ReservedOperation.editing.getId())); - subquery.where( - cb.or( - cb.and(metadataOperations, groupOwnerPredicate), - cb.and(editableGroups, operationTypeEdit))); - - Path opAllowedMetadataId = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId); - subquery.select(opAllowedMetadataId); - - predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); - 
query.distinct(true); + if (valuesNotIn.length > 0) { + predicates.add(cb.and( + cb.equal(linkLinkStatusJoin.get(LinkStatus_.checkDate), root.get(Link_.lastCheck)), + cb.not(linkLinkStatusJoin.get((LinkStatus_.statusValue)).in(Arrays.asList( + Arrays.stream(valuesNotIn).map(String::valueOf).toArray()))))); } + } - if (groupPublishedIds != null && groupPublishedIds.length > 0) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); + Join metadataJoin = root.join(Link_.records, JoinType.INNER); + Subquery subquery = query.subquery(Integer.class); + final Root opAllowRoot = subquery.from(OperationAllowed.class); + final Root metadataRoot = subquery.from(Metadata.class); - Subquery subquery = query.subquery(Integer.class); - Root opAllowRoot = subquery.from(OperationAllowed.class); - Predicate publishedToIndicatedGroup = - opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(groupPublishedIds); - Predicate operationTypeView = cb.equal( + boolean editinGroupQuery = editingGroupIds != null && editingGroupIds.length > 0; + boolean groupPublishedQuery = groupPublishedIds != null && groupPublishedIds.length > 0; + boolean groupOwnerQuery = groupOwnerIds != null && groupOwnerIds.length > 0; + + List subQueryPredicates = new ArrayList<>(); + + if (editinGroupQuery) { + final Predicate groupOwnerPredicate = + metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(editingGroupIds); + final Predicate metadataOperations = cb.equal(metadataRoot.get(Metadata_.id), opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId)); + Predicate editableGroups = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(Arrays.asList(editingGroupIds)); + Predicate operationTypeEdit = + cb.equal( opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.operationId), - cb.literal(ReservedOperation.view.getId())); - subquery.where( - cb.and(publishedToIndicatedGroup, operationTypeView)); + cb.literal(ReservedOperation.editing.getId())); - Path opAllowedMetadataId = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId); - subquery.select(opAllowedMetadataId); + subQueryPredicates.add(cb.or( + cb.and(metadataOperations, groupOwnerPredicate), + cb.and(editableGroups, operationTypeEdit))); + } - predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); - query.distinct(true); - } + if (groupPublishedQuery) { + Predicate publishedToIndicatedGroup = + opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.groupId).in(groupPublishedIds); + Predicate operationTypeView = cb.equal( + opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.operationId), + cb.literal(ReservedOperation.view.getId())); - if (groupOwnerIds != null && groupOwnerIds.length > 0) { - Join metadataJoin = root.join(Link_.records, JoinType.INNER); - Subquery subquery = query.subquery(Integer.class); - final Root metadataRoot = subquery.from(Metadata.class); - final Predicate groupOwnerPredicate = - metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(groupOwnerIds); - subquery.where(groupOwnerPredicate); + subQueryPredicates.add(cb.and(publishedToIndicatedGroup, operationTypeView)); + } - Path metadataId = metadataRoot.get(Metadata_.id); - subquery.select(metadataId); + if (groupOwnerQuery) { + final Predicate groupOwnerPredicate = + metadataRoot.get(Metadata_.sourceInfo).get(MetadataSourceInfo_.groupOwner).in(groupOwnerIds); - 
predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); - query.distinct(true); - } - return cb.and(predicates.toArray(new Predicate[]{})); + subQueryPredicates.add(groupOwnerPredicate); } + + + if (subQueryPredicates.size() > 0) { + subquery.where(subQueryPredicates.toArray(new Predicate[]{})); + + Path opAllowedMetadataId = opAllowRoot.get(OperationAllowed_.id).get(OperationAllowedId_.metadataId); + subquery.select(opAllowedMetadataId); + + predicates.add(metadataJoin.get(MetadataLink_.metadataId).in(subquery)); + } + + query.distinct(true); + + return cb.and(predicates.toArray(new Predicate[]{})); }; } } diff --git a/domain/src/test/java/org/fao/geonet/repository/LinkRespositoryTest.java b/domain/src/test/java/org/fao/geonet/repository/LinkRespositoryTest.java new file mode 100644 index 00000000000..4bcdcab74c7 --- /dev/null +++ b/domain/src/test/java/org/fao/geonet/repository/LinkRespositoryTest.java @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository; + +import org.fao.geonet.domain.Link; +import org.fao.geonet.domain.LinkType; +import org.junit.Assert; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + + +public class LinkRespositoryTest extends AbstractSpringDataTest { + + @Autowired + private LinkRepository repository; + + @Test + public void testFindAllByUrlIn() { + Link link = new Link(); + link.setLinkType(LinkType.HTTP); + link.setUrl("https://test.com/link"); + + repository.save(link); + + List links = new ArrayList<>(); + links.add(link.getUrl()); + List linkList = repository.findAllByUrlIn(links); + + Assert.assertNotNull(linkList); + Assert.assertEquals(1, linkList.size()); + Assert.assertEquals(link.getUrl(), linkList.get(0).getUrl()); + } + + @Test + public void testFindAllByUrlInNoResults() { + List links = new ArrayList<>(); + links.add("https://test.com/link"); + List linkList = repository.findAllByUrlIn(links); + + Assert.assertNotNull(linkList); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testFindOneByUrl() { + Link link = new Link(); + link.setLinkType(LinkType.HTTP); + link.setUrl("https://test.com/link"); + + repository.save(link); + + Optional linkToCheck = repository.findOneByUrl("https://test.com/link"); + + Assert.assertNotNull(linkToCheck); + Assert.assertTrue(linkToCheck.isPresent()); + Assert.assertEquals(link.getUrl(), linkToCheck.get().getUrl()); + } + + @Test + public void testFindOneByUrlNoResult() { + Optional link = repository.findOneByUrl("https://test.com/link"); + + Assert.assertNotNull(link); + Assert.assertTrue(link.isEmpty()); + } + +} diff --git a/domain/src/test/java/org/fao/geonet/repository/specification/LinkSpecsTest.java b/domain/src/test/java/org/fao/geonet/repository/specification/LinkSpecsTest.java new file mode 100644 index 00000000000..6f025abd585 --- /dev/null +++ b/domain/src/test/java/org/fao/geonet/repository/specification/LinkSpecsTest.java @@ -0,0 +1,324 @@ +/* + * Copyright (C) 2001-2024 Food and Agriculture Organization of the + * United Nations (FAO-UN), United Nations World Food Programme (WFP) + * and United Nations Environment Programme (UNEP) + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or (at + * your option) any later version. + * + * This program is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + * + * Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2, + * Rome - Italy. 
email: geonetwork@osgeo.org + */ + +package org.fao.geonet.repository.specification; + +import org.fao.geonet.domain.*; +import org.fao.geonet.repository.*; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +public class LinkSpecsTest extends AbstractSpringDataTest { + @Autowired + MetadataRepository metadataRepository; + + @Autowired + MetadataLinkRepository metadataLinkRepository; + + @Autowired + LinkStatusRepository linkStatusRepository; + + @Autowired + OperationAllowedRepository operationAllowedRepository; + + @Autowired + LinkRepository linkRepository; + + AtomicInteger inc = new AtomicInteger(); + + @Before + public void createTestData() { + // Create a non harvested metadata + Metadata metadata = MetadataRepositoryTest.newMetadata(inc); + metadata.getSourceInfo().setGroupOwner(2); + metadataRepository.save(metadata); + + Link link = new Link(); + link.setLinkType(LinkType.HTTP); + link.setUrl("https://test.com/link"); + link.setLastState(1); + + ISODate checkStatusDate = new ISODate(); + + Set linkStatuses = new HashSet<>(); + LinkStatus linkStatus = new LinkStatus(); + linkStatus.setLink(link); + linkStatus.setStatusValue("200"); + linkStatus.setCheckDate(checkStatusDate); + linkStatuses.add(linkStatus); + + link.setLinkStatus(linkStatuses); + + MetadataLink metadataLink = new MetadataLink(); + metadataLink.setMetadataId(metadata.getId()); + metadataLink.setMetadataUuid(metadata.getUuid()); + metadataLink.setLink(link); + + Set recordLinks = new HashSet<>(); + recordLinks.add(metadataLink); + link.setRecords(recordLinks); + link.setLastCheck(checkStatusDate); + linkRepository.save(link); + + metadataLinkRepository.save(metadataLink); + linkStatusRepository.save(linkStatus); + + // View in group 2, edit in group 2 in implicit from metadata owner group + OperationAllowed operationAllowedViewMd1 = new OperationAllowed(); + OperationAllowedId operationAllowedIdViewMd1 = new OperationAllowedId(); + operationAllowedIdViewMd1.setMetadataId(metadata.getId()); + operationAllowedIdViewMd1.setGroupId(2); + operationAllowedIdViewMd1.setOperationId(ReservedOperation.view.getId()); + operationAllowedViewMd1.setId(operationAllowedIdViewMd1); + operationAllowedRepository.save(operationAllowedViewMd1); + + // Edit in group 3 + OperationAllowed operationAllowedEditMd1 = new OperationAllowed(); + OperationAllowedId operationAllowedIdEditMd1 = new OperationAllowedId(); + operationAllowedIdEditMd1.setMetadataId(metadata.getId()); + operationAllowedIdEditMd1.setGroupId(3); + operationAllowedIdEditMd1.setOperationId(ReservedOperation.editing.getId()); + operationAllowedEditMd1.setId(operationAllowedIdEditMd1); + operationAllowedRepository.save(operationAllowedEditMd1); + + // Create a harvested metadata + Metadata metadata2 = MetadataRepositoryTest.newMetadata(inc); + metadata2.getSourceInfo().setGroupOwner(2); + MetadataHarvestInfo metadataHarvestInfo = new MetadataHarvestInfo(); + metadataHarvestInfo.setHarvested(true); + metadataHarvestInfo.setUuid(UUID.randomUUID().toString()); + metadata2.setHarvestInfo(metadataHarvestInfo); + + metadataRepository.save(metadata2); + + Link link2 = new Link(); + link2.setLinkType(LinkType.HTTP); + link2.setUrl("https://test.com/link2"); + link2.setLastCheck(checkStatusDate); + link2.setLastState(-1); + + Set linkStatuses2 = new HashSet<>(); + LinkStatus linkStatus2 
= new LinkStatus(); + linkStatus2.setLink(link2); + linkStatus2.setStatusValue("404"); + linkStatus2.setCheckDate(checkStatusDate); + linkStatuses2.add(linkStatus2); + + link2.setLinkStatus(linkStatuses2); + + MetadataLink metadataLink2 = new MetadataLink(); + metadataLink2.setMetadataId(metadata2.getId()); + metadataLink2.setMetadataUuid(metadata2.getUuid()); + metadataLink2.setLink(link2); + + Set recordLinks2 = new HashSet<>(); + recordLinks2.add(metadataLink2); + link2.setRecords(recordLinks2); + linkRepository.save(link2); + + metadataLinkRepository.save(metadataLink2); + linkStatusRepository.save(linkStatus2); + + // View in group 2, edit in group 2 in implicit from metadata owner group + OperationAllowed operationAllowedViewMd2 = new OperationAllowed(); + OperationAllowedId operationAllowedIdViewMd2 = new OperationAllowedId(); + operationAllowedIdViewMd2.setMetadataId(metadata2.getId()); + operationAllowedIdViewMd2.setGroupId(2); + operationAllowedIdViewMd2.setOperationId(ReservedOperation.view.getId()); + operationAllowedViewMd2.setId(operationAllowedIdViewMd2); + operationAllowedRepository.save(operationAllowedViewMd2); + } + + @Test + public void testLinkSpecsFilterUrlPartToContainMatch() { + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter("https://test.com", null, null, null, null, null, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter("https://test.com", null, null, null, null, null, false, null)); + Assert.assertEquals(2, linkList2.size()); + + } + + @Test + public void testLinkSpecsFilterUrlPartToContainNoMatch() { + List linkList = linkRepository.findAll(LinkSpecs.filter("https://test2.com", null, null, null, null, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterAssociatedRecordsMatch() { + List associatedRecords = metadataRepository.findAll().stream().map(Metadata::getUuid).collect(Collectors.toList()); + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, associatedRecords, null, null, null, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, associatedRecords, null, null, null, false, null)); + Assert.assertEquals(2, linkList2.size()); + } + + @Test + public void testLinkSpecsFilterAssociatedRecordsNoMatch() { + List associatedRecords = new ArrayList<>(); + associatedRecords.add("aaaa"); + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, associatedRecords, null, null, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterHttpStatusFilterMatch() { + Integer[] httpStatusValueFilter = new Integer[]{200, 404}; + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, httpStatusValueFilter, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, httpStatusValueFilter, false, null)); + Assert.assertEquals(2, linkList2.size()); + } + + @Test + public void testLinkSpecsFilterHttpStatusFilterNoMatch() { + Integer[] httpStatusValueFilter = new Integer[]{500}; + + List linkList = 
linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, httpStatusValueFilter, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterGroupOwnersIdsMatch() { + Integer[] groupOwnerIds = new Integer[]{2}; + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, null, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, null, false, null)); + Assert.assertEquals(2, linkList2.size()); + } + + @Test + public void testLinkSpecsFilterGroupOwnersIdsNoMatch() { + Integer[] groupOwnerIds = new Integer[]{3}; + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterGroupPublishedIdsMatch() { + Integer[] groupPublishedIds = new Integer[]{2}; + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, groupPublishedIds, null, null, true, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, groupPublishedIds, null, null, false, null)); + Assert.assertEquals(2, linkList2.size()); + } + + @Test + public void testLinkSpecsFilterGroupPublishedIdsNoMatch() { + Integer[] groupPublishedIds = new Integer[]{3}; + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, groupPublishedIds, null, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsFilterEditingGroupIdsMatch() { + Integer[] editingGroupIds1 = new Integer[]{2}; + + // Query excluding harvested metadata + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, true, editingGroupIds1)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, false, editingGroupIds1)); + Assert.assertEquals(2, linkList2.size()); + + Integer[] editingGroupIds2 = new Integer[]{3}; + + // Query excluding harvested metadata + List linkList3 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, true, editingGroupIds2)); + Assert.assertEquals(1, linkList3.size()); + + // Query not excluding harvested metadata + List linkList4 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, false, editingGroupIds2)); + Assert.assertEquals(1, linkList4.size()); + } + + @Test + public void testLinkSpecsFilterEditingGroupIdsNoMatch() { + Integer[] editingGroupIds = new Integer[]{4}; + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, null, null, false, editingGroupIds)); + Assert.assertEquals(0, linkList.size()); + } + + @Test + public void testLinkSpecsStateMatch() { + List linkList = linkRepository.findAll(LinkSpecs.filter(null, -1, null, null, null, null, false, null)); + Assert.assertEquals(1, linkList.size()); + + // Query not excluding harvested metadata + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, 1, null, null, null, null, false, null)); + Assert.assertEquals(1, linkList2.size()); + } + + @Test + public void testLinkSpecsStateNoMatch() { + 
List linkList = linkRepository.findAll(LinkSpecs.filter(null, 0, null, null, null, null, false, null)); + Assert.assertEquals(0, linkList.size()); + } + + + @Test + public void testLinkSpecsSeveralFilters() { + // Find links with state 1, related to metadata published to group 2 + Integer[] groupPublishedIds = new Integer[]{2}; + + List linkList = linkRepository.findAll(LinkSpecs.filter(null, 1, null, groupPublishedIds, null, null, false, null)); + Assert.assertEquals(1, linkList.size()); + + // Find links that contain the url 'https://test.com', with http status 200 / 404 / 500, related to metadata owned by groups 2 / 3 + Integer[] httpStatusValueFilter = new Integer[]{200, 404, 500}; + Integer[] groupOwnerIds = new Integer[]{2, 3}; + + List linkList2 = linkRepository.findAll(LinkSpecs.filter(null, null, null, null, groupOwnerIds, httpStatusValueFilter, false, null)); + Assert.assertEquals(2, linkList2.size()); + } +} diff --git a/es/README.md b/es/README.md index c27cee0b4b0..0ef407c7f22 100644 --- a/es/README.md +++ b/es/README.md @@ -1,33 +1,48 @@ # Install, configure and start Elasticsearch +## Installation options + This section describes several methods for configuring Elasticsearch for development. These configurations should not be used for a production deployment. -## Manual installation +### Docker installation (Recommended) -1. Download Elasticsearch 8.x (tested with 8.11.3 for Geonetwork 4.4.x) from https://www.elastic.co/downloads/elasticsearch -and copy to the ES module, e.g., es/elasticsearch-8.11.3 +1. Use docker pull to download the image (you can check version in the :file:`pom.xml` file): -2. Disable the security + ``` + docker pull docker.elastic.co/elasticsearch/elasticsearch:8.11.3 + ``` -Elasticsearch 8 has security enabled by default. To disable this configuration for development, update the file `config/elasticsearch.yml` adding at the end: +2. Use docker run, leaving 9200 available: -``` -xpack.security.enabled: false -xpack.security.enrollment.enabled: false -``` + ``` + docker run -p 9200:9200 -p 9300:9300 \ + -e "discovery.type=single-node" \ + -e "xpack.security.enabled=false" \ + -e "xpack.security.enrollment.enabled=false" \ + docker.elastic.co/elasticsearch/elasticsearch:8.11.3 + ``` + +3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser +### Docker compose installation -3. Start ES using: +1. Use docker compose with the provided [docker-compose.yml](docker-compose.yml): - ```shell script - ./bin/elasticsearch + ``` + cd es + docker-compose up ``` -4. Check that elasticsearch is running by visiting http://localhost:9200 in a browser +3. Check that it is running using your browser: + + * Elasticsearch: http://localhost:9200 + * Kibana: http://localhost:5601 + +### Maven installation -## Maven installation +Maven installation ensure you always are using the ``es.version`` version specified in ``pom.xml``. 1. Maven can take care of the installation steps: @@ -44,42 +59,32 @@ xpack.security.enrollment.enabled: false ``` 3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser -## Docker installation +## Manual installation -1. Use docker pull to download the image (you can check version in the :file:`pom.xml` file): +1. Download Elasticsearch 8.11.3 from https://www.elastic.co/downloads/elasticsearch +and copy to the ES module, e.g., ``es/elasticsearch-8.11.3` - ``` - docker pull docker.elastic.co/elasticsearch/elasticsearch:8.11.3 - ``` +2. Disable the security -2. 
Use docker run, leaving 9200 available: + Elasticsearch 8 has security enabled by default. To disable this configuration for development, update the file `config/elasticsearch.yml` adding at the end: ``` - docker run -p 9200:9200 -p 9300:9300 \ - -e "discovery.type=single-node" \ - -e "xpack.security.enabled=false" \ - -e "xpack.security.enrollment.enabled=false" \ - docker.elastic.co/elasticsearch/elasticsearch:8.11.3 + xpack.security.enabled: false + xpack.security.enrollment.enabled: false ``` -3. Check that elasticsearch is running by visiting http://localhost:9200 in a browser - -## Docker compose installation - -1. Use docker compose with the provided [docker-compose.yml](docker-compose.yml): +3. Start ES using: - ``` - cd es - docker-compose up + ```shell script + ./bin/elasticsearch ``` -3. Check that it is running using your browser: - - * Elasticsearch: http://localhost:9200 - * Kibana: http://localhost:5601 +4. Check that elasticsearch is running by visiting http://localhost:9200 in a browser # Configuration +## Index management + Optionally you can manually create index but they will be created by the catalogue when the Elastic instance is available and if index does not exist. @@ -122,7 +127,7 @@ Don't hesitate to propose a Pull Request with the new language. 1. Configure ES to start on server startup. It is recommended to protect `gn-records` index from the Internet access. - * Note that for debian-based servers the current deb download (7.3.2) can be installed rather than installing manually and can be configured to run as a service using the instructions here: https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html + * Note that for debian-based servers the current deb download (8.11.3) can be installed rather than installing manually and can be configured to run as a service using the instructions here: https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html # Troubleshoot @@ -163,3 +168,22 @@ field expansion for [*] matches too many fields, limit: 1024 An option is to restrict `queryBase` to limit the number of field to query on. `any:(${any}) resourceTitleObject.default:(${any})^2` is a good default. Using `${any}` will probably trigger the error if the number of records is high. The other option is to increase `indices.query.bool.max_clause_count`. + + +## Disk space threshold + +The server application will refuse to write new content unless there is enough free space available (by default 1/4 of your hard drive). + +To turn off this check: + +``` + curl -XPUT http://localhost:9200/_cluster/settings -H 'Content-Type: application/json' -d '{ "transient" : { "cluster.routing.allocation.disk.threshold_enabled" : false } }' +``` + +## Blocked by index read-only / allow delete + +To recover: + +``` +curl -XPUT -H "Content-Type: application/json" http://localhost:9200/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}' +``` diff --git a/es/es-dashboards/README.md b/es/es-dashboards/README.md index b95aa299cb4..b39111ae58e 100644 --- a/es/es-dashboards/README.md +++ b/es/es-dashboards/README.md @@ -1,24 +1,20 @@ # Install, configure and start Kibana -## Manual installation - -Download Kibana from https://www.elastic.co/downloads/kibana. 
For Geonetwork 3.8.x download at least version 7.2.1 +## Installation options -Set Kibana base path and index name in config/kibana.yml: +### Docker compose installation (Recommended) -``` -server.basePath: "/geonetwork/dashboards" -server.rewriteBasePath: false -``` +1. Use docker compose with the provided [docker-compose.yml](es/docker-compose.yml): -Adapt if needed ```elasticsearch.url``` and ```server.host```. - -Start Kibana manually: + ``` + cd es + docker-compose up + ``` -``` -cd kibana/bin -./kibana -``` +2. Check that it is running using your browser: + + * Elasticsearch: http://localhost:9200 + * Kibana: http://localhost:5601 ## Maven installation @@ -41,20 +37,26 @@ cd kibana/bin mvn exec:exec -Dkb-start ``` -## Docker compose installation +## Manual installation + +1. Download Kibana 8.11.3 from https://www.elastic.co/downloads/kibana + +2. Set Kibana base path and index name in config/kibana.yml: + + ``` + server.basePath: "/geonetwork/dashboards" + server.rewriteBasePath: false + ``` + +3. Adapt if needed ```elasticsearch.url``` and ```server.host```. -1. Use docer compose with the provided [docker-compose.yml](docker-compose.yml): +4. Start Kibana manually: ``` - cd es - docker-compose up + cd kibana/bin + ./kibana ``` -3. Check that it is running using your browser: - - * Elasticsearch: http://localhost:9200 - * Kabana: http://localhost:5601 - ## Import Configuration 1. Kibana should be running from: @@ -69,16 +71,17 @@ cd kibana/bin http://localhost:8080/geonetwork/dashboards ``` + ## Troubleshoot If it does not start properly, check Kibana log files (eg. it may fail if Elasticsearch version is not compatible with Kibana version). -Visit Kibana in a browser using one of the above links and go to 'Saved Objects'. Import export.ndjson from https://github.com/geonetwork/core-geonetwork/blob/4.0.x/es/es-dashboards/data/export.ndjson +Visit Kibana in a browser using one of the above links and go to 'Saved Objects'. Import export.ndjson from https://github.com/geonetwork/core-geonetwork/blob/main/es/es-dashboards/data/export.ndjson ### Production Use -Kibana can be installed from the debian files, and 7.3.2 is confirmed as working with Geonetwork 3.8.x. +Kibana can be installed from the debian files, and Kibana 8.11.3 is confirmed as working with Geonetwork 4.4.x. 
Set Kibana to start when the server starts up, using the instructions at https://www.elastic.co/guide/en/kibana/current/start-stop.html diff --git a/es/es-dashboards/pom.xml b/es/es-dashboards/pom.xml index cc49025ac53..9450b612660 100644 --- a/es/es-dashboards/pom.xml +++ b/es/es-dashboards/pom.xml @@ -24,12 +24,74 @@ 4.0.0 gn-es-dashboards GeoNetwork dashboard app based on Kibana - gn-es org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT + + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + check-readme + + enforce + + + + + Update README.md examples for Elasticsearch ${es.version} + + import java.util.regex.Pattern; + + esVersion = "${es.version}"; + print("Scanning README for " + esVersion); + + docker = Pattern.compile("Kibana (\\d.\\d\\d.\\d)"); + download = Pattern.compile("Download Kibana (\\d.\\d\\d.\\d)"); + + patterns = new Pattern[]{ docker, download}; + + readme = new BufferedReader(new FileReader("README.md")); + + number = 0; + while ((line = readme.readLine()) != null) { + number++; + for (pattern : patterns ){ + matcher = pattern.matcher(line); + if (matcher.find()) { + if (!esVersion.equals(matcher.group(1))) { + print("README.md:"+number+" FAILURE: " + line); + return false; + } + } + } + } + readme.close(); + true; + + + + + + + check-docker + + enforce + + + true + + + + + + + kb-download diff --git a/es/pom.xml b/es/pom.xml index 5e6a40550da..27e3aac1085 100644 --- a/es/pom.xml +++ b/es/pom.xml @@ -5,13 +5,112 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 gn-es GeoNetwork index using Elasticsearch pom + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + check-readme + + enforce + + + + + Update README.md examples for Elasticsearch ${es.version} + + import java.util.regex.Pattern; + + esVersion = "${es.version}"; + print("Scanning README for " + esVersion); + + docker = Pattern.compile("elasticsearch:(\\d.\\d\\d.\\d)"); + download = Pattern.compile("Download Elasticsearch (\\d.\\d\\d.\\d)"); + folder = Pattern.compile("es/elasticsearch-(\\d.\\d\\d.\\d)"); + + patterns = new Pattern[]{ docker, download, folder}; + + readme = new BufferedReader(new FileReader("README.md")); + + number = 0; + while ((line = readme.readLine()) != null) { + number++; + for (pattern : patterns ){ + matcher = pattern.matcher(line); + if (matcher.find()) { + if (!esVersion.equals(matcher.group(1))) { + print("README.md:"+number+" FAILURE: " + line); + return false; + } + } + } + } + readme.close(); + true; + + + + + + + check-docker + + enforce + + + + + Update docker-compose.yml for Elasticsearch ${es.version} + + import java.util.regex.Pattern; + + boolean scanDockerCompose(String filename){ + esVersion = "${es.version}"; + print("Scanning "+filename+" for " + esVersion); + + docker = Pattern.compile("elasticsearch:(\\d.\\d\\d.\\d)"); + kibana = Pattern.compile("kibana:(\\d.\\d\\d.\\d)"); + patterns = new Pattern[]{ docker, kibana}; + + reader = new BufferedReader(new FileReader("${project.basedir}"+"/"+filename)); + + number = 0; + while ((line = reader.readLine()) != null) { + number++; + for (pattern : patterns ){ + matcher = pattern.matcher(line); + if (matcher.find()) { + if (!esVersion.equals(matcher.group(1))) { + print(filename+":"+number+" FAILURE: " + line); + return false; + } + } + } + } + reader.close(); + return true; + } + + return scanDockerCompose("docker-compose.yml"); + + + + + + + + + + es-download diff --git a/estest/pom.xml b/estest/pom.xml index e9ba611bc41..6c7716d8367 100644 --- a/estest/pom.xml +++ 
b/estest/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/events/pom.xml b/events/pom.xml index 8b8405f0b5e..c140f3e4d02 100644 --- a/events/pom.xml +++ b/events/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT GeoNetwork Events diff --git a/harvesters/pom.xml b/harvesters/pom.xml index c830794b85b..65484c3489f 100644 --- a/harvesters/pom.xml +++ b/harvesters/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java index d25c803cb68..b15c89ab302 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/UriMapper.java @@ -1,5 +1,5 @@ //============================================================================= -//=== Copyright (C) 2001-2007 Food and Agriculture Organization of the +//=== Copyright (C) 2001-2024 Food and Agriculture Organization of the //=== United Nations (FAO-UN), United Nations World Food Programme (WFP) //=== and United Nations Environment Programme (UNEP) //=== @@ -26,6 +26,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.Optional; import org.fao.geonet.domain.AbstractMetadata; import org.fao.geonet.kernel.datamanager.IMetadataUtils; @@ -41,7 +42,7 @@ */ public class UriMapper { - private HashMap> hmUriRecords = new HashMap>(); + private HashMap> hmUriRecords = new HashMap<>(); //-------------------------------------------------------------------------- //--- @@ -49,21 +50,21 @@ public class UriMapper { //--- //-------------------------------------------------------------------------- - public UriMapper(ServiceContext context, String harvestUuid) throws Exception { + public UriMapper(ServiceContext context, String harvestUuid) { final IMetadataUtils metadataRepository = context.getBean(IMetadataUtils.class); final List metadataList = metadataRepository.findAll(MetadataSpecs.hasHarvesterUuid(harvestUuid)); - for (AbstractMetadata record : metadataList) { - String uri = record.getHarvestInfo().getUri(); + for (AbstractMetadata metadataRecord : metadataList) { + String uri = Optional.ofNullable(metadataRecord.getHarvestInfo().getUri()).orElse(""); - List records = hmUriRecords.get(uri); + List records = hmUriRecords.computeIfAbsent(uri, k -> new ArrayList<>()); if (records == null) { - records = new ArrayList(); + records = new ArrayList<>(); hmUriRecords.put(uri, records); } - records.add(new RecordInfo(record)); + records.add(new RecordInfo(metadataRecord)); } } diff --git a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java index 9cf652b1b1b..e2c4065c367 100644 --- a/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java +++ b/harvesters/src/main/java/org/fao/geonet/kernel/harvest/harvester/geonet/Aligner.java @@ -30,6 +30,7 @@ import org.fao.geonet.GeonetContext; import org.fao.geonet.Logger; import org.fao.geonet.MetadataResourceDatabaseMigration; +import org.fao.geonet.api.exception.ResourceNotFoundException; import org.fao.geonet.api.records.attachments.Store; import org.fao.geonet.constants.Geonet; import org.fao.geonet.domain.*; @@ 
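A brief aside on the `UriMapper` change shown above: `Map.computeIfAbsent` creates and registers the list when the key is absent, so the grouping idiom needs no separate null check or `put`. A self-contained sketch of the pattern follows, using illustrative data rather than GeoNetwork classes.

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

public class GroupByUriSketch {
    public static void main(String[] args) {
        // Pairs of (record id, harvest URI); one URI is missing to show the null handling.
        String[][] records = {{"r1", "http://a"}, {"r2", null}, {"r3", "http://a"}};
        Map<String, List<String>> byUri = new HashMap<>();
        for (String[] r : records) {
            // Normalise a missing URI to "" — same intent as Optional.ofNullable(...).orElse("").
            String uri = Optional.ofNullable(r[1]).orElse("");
            // computeIfAbsent stores the new list itself, so no follow-up null check is required.
            byUri.computeIfAbsent(uri, k -> new ArrayList<>()).add(r[0]);
        }
        System.out.println(byUri); // e.g. {=[r2], http://a=[r1, r3]}
    }
}
```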
-77,9 +78,9 @@ public class Aligner extends BaseAligner { private UUIDMapper localUuids; private String processName; private String preferredSchema; - private Map processParams = new HashMap(); + private Map processParams = new HashMap<>(); private MetadataRepository metadataRepository; - private Map> hmRemoteGroups = new HashMap>(); + private Map> hmRemoteGroups = new HashMap<>(); private SettingManager settingManager; public Aligner(AtomicBoolean cancelMonitor, Logger log, ServiceContext context, XmlRequest req, @@ -119,7 +120,7 @@ private void setupLocEntity(List list, Map> for (Element entity : list) { String name = entity.getChildText("name"); - Map hm = new HashMap(); + Map hm = new HashMap<>(); hmEntity.put(name, hm); @SuppressWarnings("unchecked") @@ -163,7 +164,7 @@ public HarvestResult align(SortedSet records, List err result.locallyRemoved++; } - } catch (Throwable t) { + } catch (Exception t) { log.error("Couldn't remove metadata with uuid " + uuid); log.error(t); result.unchangedMetadata++; @@ -197,7 +198,6 @@ public HarvestResult align(SortedSet records, List err String id = dataMan.getMetadataId(ri.uuid); // look up value of localrating/enable - SettingManager settingManager = context.getBean(SettingManager.class); String localRating = settingManager.getValue(Settings.SYSTEM_LOCALRATING_ENABLE); if (id == null) { @@ -230,6 +230,7 @@ public HarvestResult align(SortedSet records, List err case SKIP: log.debug("Skipping record with uuid " + ri.uuid); result.uuidSkipped++; + break; default: break; } @@ -248,7 +249,7 @@ public HarvestResult align(SortedSet records, List err } } - } catch (Throwable t) { + } catch (Exception t) { log.error("Couldn't insert or update metadata with uuid " + ri.uuid); log.error(t); result.unchangedMetadata++; @@ -282,7 +283,7 @@ private Element extractValidMetadataForImport(DirectoryStream files, Eleme Log.debug(Geonet.MEF, "Multiple metadata files"); Map> mdFiles = - new HashMap>(); + new HashMap<>(); for (Path file : files) { if (Files.isRegularFile(file)) { Element metadata = Xml.loadFile(file); @@ -353,8 +354,8 @@ private Element extractValidMetadataForImport(DirectoryStream files, Eleme } private void addMetadata(final RecordInfo ri, final boolean localRating, String uuid) throws Exception { - final String id[] = {null}; - final Element md[] = {null}; + final String[] id = {null}; + final Element[] md = {null}; //--- import metadata from MEF file @@ -595,13 +596,13 @@ private void addPrivilegesFromGroupPolicy(String id, Element privil) throws Exce } private Map> buildPrivileges(Element privil) { - Map> map = new HashMap>(); + Map> map = new HashMap<>(); for (Object o : privil.getChildren("group")) { Element group = (Element) o; String name = group.getAttributeValue("name"); - Set set = new HashSet(); + Set set = new HashSet<>(); map.put(name, set); for (Object op : group.getChildren("operation")) { @@ -662,9 +663,9 @@ private String createGroup(String name) throws Exception { */ private void updateMetadata(final RecordInfo ri, final String id, final boolean localRating, final boolean useChangeDate, String localChangeDate, Boolean force) throws Exception { - final Element md[] = {null}; - final Element publicFiles[] = {null}; - final Element privateFiles[] = {null}; + final Element[] md = {null}; + final Element[] publicFiles = {null}; + final Element[] privateFiles = {null}; if (localUuids.getID(ri.uuid) == null && !force) { if (log.isDebugEnabled()) @@ -756,7 +757,6 @@ private void updateMetadata(RecordInfo ri, String id, Element md, return; } 
- final IMetadataManager metadataManager = context.getBean(IMetadataManager.class); Metadata metadata; if (!force && !ri.isMoreRecentThan(date)) { if (log.isDebugEnabled()) @@ -883,12 +883,18 @@ private void saveFile(final Store store, String metadataUuid, String file, ISODate remIsoDate = new ISODate(changeDate); boolean saveFile; - final MetadataResource description = store.getResourceDescription(context, metadataUuid, visibility, file, true); - if (description == null) { - saveFile = true; - } else { - ISODate locIsoDate = new ISODate(description.getLastModification().getTime(), false); + Store.ResourceHolder resourceHolder; + try { + resourceHolder = store.getResource(context, metadataUuid, visibility, file, true); + } catch (ResourceNotFoundException ex) { + resourceHolder = null; + } + + if ((resourceHolder != null) && (resourceHolder.getMetadata() != null)) { + ISODate locIsoDate = new ISODate(resourceHolder.getMetadata().getLastModification().getTime(), false); saveFile = (remIsoDate.timeDifferenceInSeconds(locIsoDate) > 0); + } else { + saveFile = true; } if (saveFile) { diff --git a/healthmonitor/pom.xml b/healthmonitor/pom.xml index 805dae47790..8bc36c32cb3 100644 --- a/healthmonitor/pom.xml +++ b/healthmonitor/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java b/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java index d3afc90e4f1..3c60aa7f000 100644 --- a/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java +++ b/healthmonitor/src/main/java/org/fao/geonet/monitor/health/NoIndexErrorsHealthCheck.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2001-2023 Food and Agriculture Organization of the + * Copyright (C) 2001-2024 Food and Agriculture Organization of the * United Nations (FAO-UN), United Nations World Food Programme (WFP) * and United Nations Environment Programme (UNEP) * @@ -44,7 +44,8 @@ protected Result check() throws Exception { GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME); EsSearchManager searchMan = gc.getBean(EsSearchManager.class); - long numDocs = searchMan.getNumDocs("+" + IndexFields.INDEXING_ERROR_FIELD + ":true"); + long numDocs = searchMan.getNumDocs("-" + IndexFields.INDEXING_ERROR_MSG + ".type:warning +" + + IndexFields.INDEXING_ERROR_FIELD + ":true"); if (numDocs > 0) { return Result.unhealthy(String.format("Found %d metadata that had errors during indexing", numDocs)); diff --git a/index/pom.xml b/index/pom.xml index 9e82d3e9130..224880122f1 100644 --- a/index/pom.xml +++ b/index/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 gn-index diff --git a/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java b/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java index 2f031f726b2..4b21f7c61a3 100644 --- a/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java +++ b/index/src/main/java/org/fao/geonet/index/es/EsRestClient.java @@ -413,47 +413,23 @@ public Map getDocument(String index, String id) throws Exception /** * Query the index for a specific record and return values for a set of fields. 
*/ - public Map getFieldsValues(String index, String id, Set fields) throws IOException { + public Map getFieldsValues(String index, String id, Set fields, String language) throws Exception { if (!activated) { return Collections.emptyMap(); } - Map fieldValues = new HashMap<>(fields.size()); - try { - String query = String.format("_id:\"%s\"", id); - // TODO: Check maxRecords - // TODO: Use _doc API? - - - final SearchResponse searchResponse = this.query(index, query, null, fields, new HashMap<>(), 0, 1, null); - - List totalHits = searchResponse.hits().hits(); - long matches = totalHits.size(); - if (matches == 0) { - return fieldValues; - } else if (matches == 1) { - final Hit hit = totalHits.get(0); - - fields.forEach(f -> { - final Object o = hit.fields().get(f); - if (o instanceof String) { - fieldValues.put(f, (String) o); - } else if (o instanceof HashMap && f.endsWith("Object")) { - fieldValues.put(f, (String) ((HashMap) o).get("default")); - } - }); - } else { - throw new IOException(String.format( - "Your query '%s' returned more than one record, %d in fact. Can't retrieve field values for more than one record.", - query, - matches - )); + Map fieldValues = new HashMap<>(); + Map sources = getDocument(index, id); + + for (String field : fields) { + Object value = sources.get(field); + if (value instanceof String) { + fieldValues.put(field, (String) value); + } else if (value instanceof Map && field.endsWith("Object")) { + Map valueMap = (Map) value; + String languageValue = (String) valueMap.get("lang" + language); + fieldValues.put(field, languageValue != null ? languageValue : (String) valueMap.get("default")); } - - } catch (Exception e) { - throw new IOException(String.format( - "Error during fields value retrieval. Errors is '%s'.", e.getMessage() - )); } return fieldValues; } diff --git a/inspire-atom/pom.xml b/inspire-atom/pom.xml index 31849c58ccb..421005aa62d 100644 --- a/inspire-atom/pom.xml +++ b/inspire-atom/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/jmeter/pom.xml b/jmeter/pom.xml index bf6daf9d308..5711045e1ef 100644 --- a/jmeter/pom.xml +++ b/jmeter/pom.xml @@ -29,7 +29,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT diff --git a/listeners/pom.xml b/listeners/pom.xml index d46c1217e63..2efa177df1d 100644 --- a/listeners/pom.xml +++ b/listeners/pom.xml @@ -28,7 +28,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT GeoNetwork Events diff --git a/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java b/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java index 06299c4be91..b9e052d5e99 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java +++ b/listeners/src/main/java/org/fao/geonet/listener/history/GenericMetadataEventListener.java @@ -70,7 +70,7 @@ public final void handleEvent(AbstractHistoryEvent event) { */ public final void storeContentHistoryEvent(AbstractHistoryEvent event) { - if(settingManager.getValueAsBool(Settings.SYSTEM_METADATA_HISTORY_ENABLED)) { + if(settingManager.getValueAsBool(Settings.METADATA_HISTORY_ENABLED)) { Integer metadataid = Math.toIntExact(event.getMdId()); diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java index 823a76e2ea5..b335fc9cdec 100644 --- 
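An aside on the `getFieldsValues` rewrite above: for multilingual `*Object` fields the new code prefers the `lang<code>` entry of the stored map and falls back to `default`. The sketch below reproduces that resolution logic on plain maps, with an illustrative field name rather than a live Elasticsearch document.

```java
import java.util.HashMap;
import java.util.Map;

public class FieldValueFallback {
    // Mirrors the lookup order used above: "lang" + language first, then "default".
    static String resolve(Map<String, Object> source, String field, String language) {
        Object value = source.get(field);
        if (value instanceof String) {
            return (String) value;
        }
        if (value instanceof Map && field.endsWith("Object")) {
            Map<?, ?> byLang = (Map<?, ?>) value;
            Object localized = byLang.get("lang" + language);
            return (String) (localized != null ? localized : byLang.get("default"));
        }
        return null;
    }

    public static void main(String[] args) {
        Map<String, Object> doc = new HashMap<>();
        Map<String, String> title = new HashMap<>();
        title.put("default", "Rivers");
        title.put("langfre", "Rivières");
        doc.put("resourceTitleObject", title);
        System.out.println(resolve(doc, "resourceTitleObject", "fre")); // Rivières
        System.out.println(resolve(doc, "resourceTitleObject", "ger")); // Rivers (fallback to default)
    }
}
```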
a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java +++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApprovePublishedRecord.java @@ -121,7 +121,7 @@ private void changeToApproved(AbstractMetadata md, MetadataStatus previousStatus status.setChangeDate(new ISODate()); status.setUserId(ServiceContext.get().getUserSession().getUserIdAsInt()); - metadataStatus.setStatusExt(status); + metadataStatus.setStatusExt(status, false); Log.trace(Geonet.DATA_MANAGER, "Metadata with id " + md.getId() + " automatically approved due to publishing."); } diff --git a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java index 1a315ca05af..a987943bf39 100644 --- a/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java +++ b/listeners/src/main/java/org/fao/geonet/listener/metadata/draft/ApproveRecord.java @@ -148,7 +148,7 @@ private AbstractMetadata approveWithDraft(MetadataStatusChanged event) throws Nu status.setChangeDate(new ISODate()); status.setUserId(event.getUser()); - metadataStatus.setStatusExt(status); + metadataStatus.setStatusExt(status, false); } else if (md instanceof Metadata) { draft = null; //metadataDraftRepository.findOneByUuid(md.getUuid()); diff --git a/messaging/pom.xml b/messaging/pom.xml index 54eb690f6d2..ebdf4a2c491 100644 --- a/messaging/pom.xml +++ b/messaging/pom.xml @@ -5,7 +5,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 diff --git a/oaipmh/pom.xml b/oaipmh/pom.xml index efa7fe0facc..b461cbe67ee 100644 --- a/oaipmh/pom.xml +++ b/oaipmh/pom.xml @@ -30,7 +30,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT diff --git a/pom.xml b/pom.xml index 11906a4e3a4..87401d9d9ab 100644 --- a/pom.xml +++ b/pom.xml @@ -29,7 +29,7 @@ org.geonetwork-opensource geonetwork pom - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT GeoNetwork opensource GeoNetwork opensource is a standards based, Free and Open Source catalog application to manage spatially referenced @@ -245,12 +245,36 @@ maven-toolchains-plugin 3.0.0 + + org.apache.maven.plugins + maven-enforcer-plugin + 3.4.1 + + org.apache.maven.plugins + maven-enforcer-plugin + + + enforce-maven + + enforce + + + + + 3.8.3 + + + + + + + maven-compiler-plugin 11 @@ -269,7 +293,6 @@ - org.apache.maven.plugins maven-resources-plugin UTF-8 @@ -295,7 +318,6 @@ - org.apache.maven.plugins maven-surefire-plugin org.fao.geonet.repository.AbstractSpringDataTest @@ -1586,7 +1608,7 @@ request the list of hosts (but JPA cache db queries). --> * - 9.4.53.v20231009 + 9.4.54.v20240208 jetty-distribution-${jetty.version} https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-distribution/${jetty.version}/${jetty.file}.tar.gz @@ -1596,11 +1618,11 @@ 1.19.0 42.6.0 - 5.3.31 - 5.8.8 + 5.3.33 + 5.8.11 2.7.18 2.7.0 - 1.5.13 + 1.8.0 5.6.15.Final 2.2.0 @@ -1611,7 +1633,7 @@ true 2.7 2.1.1 - 2.15.3 + 2.16.2 9.1.22 2.25.1 2.17.2 diff --git a/release-build.sh b/release-build.sh new file mode 100755 index 00000000000..8f84c41974b --- /dev/null +++ b/release-build.sh @@ -0,0 +1,85 @@ +#!/bin/bash + +buildRequiredApps=( "java" "git" "mvn" "ant" "xmlstarlet" ) + +for app in "${buildRequiredApps[@]}"; do : + if ! [ -x "$(command -v ${app})" ]; then + echo "Error: ${app} is not installed." 
>&2
+    exit 1
+  fi
+done
+
+function showUsage
+{
+  echo -e "\nThis script is used to build a release for the current branch"
+  echo
+}
+
+if [ "$1" = "-h" ]
+then
+  showUsage
+  exit
+fi
+
+projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml`
+subVersion=`cut -d "-" -f 2 <<< $projectVersion`
+mainVersion=`cut -d "-" -f 1 <<< $projectVersion`
+mainVersionMajor=`cut -d "." -f 1 <<< $mainVersion`
+mainVersionMinor=`cut -d "." -f 2 <<< $mainVersion`
+mainVersionSub=`cut -d "." -f 3 <<< $mainVersion`
+
+gitBranch=`git branch --show-current`
+
+nextVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub+1))"
+previousVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub-1))"
+
+from=origin
+frombranch=origin/${gitBranch}
+series=${mainVersionMajor}.${mainVersionMinor}
+versionbranch=${gitBranch}
+version=${projectVersion}
+minorversion=0
+release=latest
+newversion=${mainVersion}-$minorversion
+currentversion=${projectVersion}
+previousversion=${previousVersionNumber}
+nextversion=${nextVersionNumber}-SNAPSHOT
+
+echo "Building release for version ${newversion} (from ${currentversion})."
+echo ""
+echo "Before you start:"
+echo "1. Use web-ui/download-from-transifex.sh to update translations"
+echo "2. Use release-notes.sh to update change log and release notes"
+echo ""
+echo "After being built you can test the release before publishing. Git branch ${gitBranch}."
+read -p "Press enter to continue"
+
+# Update version number (in pom.xml, installer config and SQL)
+./update-version.sh $currentversion $newversion
+
+# Then commit the new version
+git add .
+git commit -m "Update version to $newversion"
+git tag -a $version -m "Tag for $version release"
+
+# Build the new release
+mvn clean install -DskipTests -ntp -Pwar -Pwro4j-prebuild-cache
+
+(cd datastorages && mvn clean install -DskipTests -ntp -Drelease -DskipTests)
+
+# Download Jetty and create the installer
+(cd release && mvn clean install -Pjetty-download && ant)
+
+# generate checksum for download artifacts
+
+if [ -f "release/target/GeoNetwork-$version/geonetwork-bundle-$newversion.zip.MD5" ]; then
+  rm release/target/GeoNetwork-$version/geonetwork-bundle-$newversion.zip.MD5
+fi
+
+if [[ ${OSTYPE:0:6} == 'darwin' ]]; then
+  md5 -r web/target/geonetwork.war > web/target/geonetwork.war.md5
+  md5 -r release/target/GeoNetwork-$newversion/geonetwork-bundle-$newversion.zip > release/target/GeoNetwork-$newversion/geonetwork-bundle-$newversion.zip.md5
+else
+  (cd web/target && md5sum geonetwork.war > geonetwork.war.md5)
+  (cd release/target/GeoNetwork-$version && md5sum geonetwork-bundle-$newversion.zip > geonetwork-bundle-$newversion.zip.md5)
+fi
diff --git a/release-notes.sh b/release-notes.sh
new file mode 100755
index 00000000000..0c4e40c3d00
--- /dev/null
+++ b/release-notes.sh
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+buildRequiredApps=( "java" "git" "mvn" "ant" "xmlstarlet" )
+
+for app in "${buildRequiredApps[@]}"; do :
+  if ! [ -x "$(command -v ${app})" ]; then
+    echo "Error: ${app} is not installed." >&2
+    exit 1
+  fi
+done
+
+function showUsage
+{
+  echo -e "\nThis script is used to generate the change log and release notes for the current branch"
+  echo
+}
+
+if [ "$1" = "-h" ]
+then
+  showUsage
+  exit
+fi
+
+projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml`
+subVersion=`cut -d "-" -f 2 <<< $projectVersion`
+mainVersion=`cut -d "-" -f 1 <<< $projectVersion`
+mainVersionMajor=`cut -d "." -f 1 <<< $mainVersion`
+mainVersionMinor=`cut -d "." -f 2 <<< $mainVersion`
+mainVersionSub=`cut -d "." -f 3 <<< $mainVersion`
+
+gitBranch=`git branch --show-current`
+
+nextVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub+1))"
+previousVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub-1))"
+
+from=origin
+frombranch=origin/${gitBranch}
+series=${mainVersionMajor}.${mainVersionMinor}
+versionbranch=${gitBranch}
+version=${projectVersion}
+minorversion=0
+release=latest
+newversion=${mainVersion}-$minorversion
+currentversion=${projectVersion}
+previousversion=${previousVersionNumber}
+nextversion=${nextVersionNumber}-SNAPSHOT
+
+echo "Creating change log and release notes for version ${newversion} (from ${currentversion}). Git branch ${gitBranch}:"
+echo "  docs/changes/changes$newversion.txt"
+echo "  docs/manual/docs/overview/change-log/version-$mainVersion.md"
+echo "When generated please review and update:"
+echo "  docs/manual/mkdocs.yml"
+echo "  docs/manual/docs/overview/latest/index.md"
+echo "  docs/manual/docs/overview/change-log/version-$mainVersion.md"
+echo ""
+read -p "Press enter to continue"
+
+# Generate list of changes
+cat <<EOF > docs/changes/changes$newversion.txt
+================================================================================
+===
+=== GeoNetwork $version: List of changes
+===
+================================================================================
+EOF
+git log --pretty='format:- %s' $previousversion... >> docs/changes/changes$newversion.txt
+
+# Generate release notes
+
+cat <<EOF > docs/manual/docs/overview/change-log/version-$mainVersion.md
+# Version $mainVersion
+
+GeoNetwork $mainVersion is a minor release.
+
+## Migration notes
+
+### API changes
+
+### Installation changes
+
+### Index changes
+
+## List of changes
+
+Major changes:
+
+EOF
+
+git log --pretty='format:* %N' $previousversion.. | grep -v "^* $" >> docs/manual/docs/overview/change-log/version-$mainVersion.md
+
+cat <<EOF >> docs/manual/docs/overview/change-log/version-$mainVersion.md
+
+and more \... see [$newversion issues](https://github.com/geonetwork/core-geonetwork/issues?q=is%3Aissue+milestone%3A$mainVersion+is%3Aclosed) and [pull requests](https://github.com/geonetwork/core-geonetwork/pulls?page=3&q=is%3Apr+milestone%3A$mainVersion+is%3Aclosed) for full details.
+EOF
diff --git a/release-publish.sh b/release-publish.sh
new file mode 100755
index 00000000000..6da58990c27
--- /dev/null
+++ b/release-publish.sh
@@ -0,0 +1,55 @@
+#!/bin/bash
+
+function showUsage
+{
+  echo -e "\nThis script is used to publish a release on sourceforge, github and maven repository"
+  echo
+  echo -e "Usage: ./`basename $0` sourceforge_username [remote]"
+  echo
+  echo -e "Example:"
+  echo -e "\t./`basename $0` sourceforgeusername"
+  echo -e "\t./`basename $0` sourceforgeusername upstream"
+  echo
+}
+
+if [ "$1" = "-h" ]
+then
+  showUsage
+  exit
+fi
+
+if [[ ($# -ne 1) && ($# -ne 2) ]]
+then
+  showUsage
+  exit
+fi
+
+projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml`
+version=`cut -d "-" -f 1 <<< $projectVersion`
+versionbranch=`git branch --show-current`
+sourceforge_username=$1
+remote=origin
+
+if [ $# -eq 2 ]
+then
+  remote=$2
+fi
+
+# Push the branch and tag to github
+git push $remote $versionbranch
+git push $remote $version
+# TODO: attach release notes to version
+
+sftp $sourceforge_username,geonetwork@frs.sourceforge.net << EOT
+cd /home/frs/project/g/ge/geonetwork/GeoNetwork_opensource
+mkdir v${version}
+cd v${version}
+put docs/changes/changes${version}-0.txt
+put release/target/GeoNetwork*/geonetwork-bundle*.zip*
+put web/target/geonetwork.war*
+put datastorages/*/target/*.zip
+bye
+EOT
+
+# Deploy to osgeo repository (requires credentials in ~/.m2/settings.xml)
+mvn deploy -DskipTests -Drelease
diff --git a/release-restore.sh b/release-restore.sh
new file mode 100755
index 00000000000..4f166f4de51
--- /dev/null
+++ b/release-restore.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+buildRequiredApps=( "java" "git" "mvn" "ant" "xmlstarlet" )
+
+for app in "${buildRequiredApps[@]}"; do :
+  if ! [ -x "$(command -v ${app})" ]; then
+    echo "Error: ${app} is not installed." >&2
+    exit 1
+  fi
+done
+
+projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml`
+subVersion=`cut -d "-" -f 2 <<< $projectVersion`
+mainVersion=`cut -d "-" -f 1 <<< $projectVersion`
+mainVersionMajor=`cut -d "." -f 1 <<< $mainVersion`
+mainVersionMinor=`cut -d "." -f 2 <<< $mainVersion`
+mainVersionSub=`cut -d "." -f 3 <<< $mainVersion`
+
+gitBranch=`git branch --show-current`
+
+nextVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub+1))"
+previousVersionNumber="${mainVersionMajor}.${mainVersionMinor}.$((mainVersionSub-1))"
+
+from=origin
+frombranch=origin/${gitBranch}
+series=${mainVersionMajor}.${mainVersionMinor}
+versionbranch=${gitBranch}
+version=${projectVersion}
+minorversion=0
+release=latest
+newversion=${mainVersion}-$minorversion
+currentversion=${projectVersion}
+previousversion=${previousVersionNumber}
+nextversion=${nextVersionNumber}-SNAPSHOT
+
+# Set version number to SNAPSHOT
+./update-version.sh $newversion $nextversion
+
+git add .
+git commit -m "Update version to $nextversion"
+
+
diff --git a/release-test.sh b/release-test.sh
new file mode 100755
index 00000000000..98e49da4b0b
--- /dev/null
+++ b/release-test.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+projectVersion=`xmlstarlet sel -t -m "/_:project/_:version" -v . -n pom.xml`
+version=`cut -d "-" -f 1 <<< $projectVersion`
+versionbranch=`git branch --show-current`
+
+echo "Testing zip in release/target/GeoNetwork-$version ..."
+ +cd "release/target/GeoNetwork-$version" +unzip -q "geonetwork-bundle-$projectVersion.zip" -d "geonetwork-bundle-$projectVersion" +cd "geonetwork-bundle-$projectVersion/bin" +./startup.sh -f diff --git a/release/build.properties b/release/build.properties index e7183fe9410..405c71163c6 100644 --- a/release/build.properties +++ b/release/build.properties @@ -5,11 +5,11 @@ homepage=https://geonetwork-opensource.org supportEmail=geonetwork-users@lists.sourceforge.net # Application version properties -version=4.4.3 +version=4.4.5 subVersion=SNAPSHOT # Java runtime properties javaVersion=11 javaDisplayVersion=11 -jreUrl=https://adoptium.net/en-GB/temurin/releases/?version=4.4.3 +jreUrl=https://adoptium.net/en-GB/temurin/releases/?version=4.4.5 jreName=AdoptOpenJDK diff --git a/release/pom.xml b/release/pom.xml index 920336a32a0..e4727d59500 100644 --- a/release/pom.xml +++ b/release/pom.xml @@ -7,7 +7,7 @@ org.geonetwork-opensource geonetwork - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT gn-release diff --git a/schemas-test/pom.xml b/schemas-test/pom.xml index 6df035d8937..db6dae1d8e9 100644 --- a/schemas-test/pom.xml +++ b/schemas-test/pom.xml @@ -27,7 +27,7 @@ geonetwork org.geonetwork-opensource - 4.4.3-SNAPSHOT + 4.4.5-SNAPSHOT 4.0.0 jar diff --git a/schemas/config-editor.xsd b/schemas/config-editor.xsd index a6ecd0ed338..7e38ea07165 100644 --- a/schemas/config-editor.xsd +++ b/schemas/config-editor.xsd @@ -263,12 +263,14 @@ Configure here the list of fields to display using a table. This only applies to
    - - - - +
    + + + + + @@ -434,6 +436,7 @@ Table column. + @@ -1086,6 +1089,7 @@ the mandatory section with no name and then the inner elements. + @@ -1154,6 +1158,119 @@ Note: Only sections with forEach support del attribute. + + + + + + + + + + + + + + + + + + + ]]> + + + + + + + + + + + + + + + + Add a hyperlink on the item + + + + + + + + An optional name to override the default one base on field name for the + section. The name must be defined in ``{schema}/loc/{lang}/strings.xml``. + + + + + + The XPath of the element to create list items. + + + + + + + + + XPath of the element to sort the list by. Must use full name of each nodes eg. gmd:organisationName/gco:CharacterString + + + + + + + An optional attribute to collapse the section. If not set the section is expanded. + + + + + + An optional attribute to not allow collapse for the section. If not set the section is expandable. + + + + + + + Local name to match if the element does not exist. @@ -1168,6 +1285,46 @@ Note: Only sections with forEach support del attribute. + + + + +