From 5e1c099f177ed5d9b5420f7b2fcf9c796c8bcf14 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Fri, 20 Sep 2024 08:19:19 +0200 Subject: [PATCH 01/45] UFAL/Removed duplicities of the bitstreams in the cmdi (#766) * Removed duplicities of the bitstreams in the cmdi. * Fixed checkstyle violation * used lindat code instead of vanilla. --- .../java/org/dspace/xoai/util/ItemUtils.java | 128 +++++------------- 1 file changed, 31 insertions(+), 97 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 1db31bcf8d47..78f4571b6216 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -12,6 +12,7 @@ import java.io.InputStream; import java.sql.SQLException; import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; import com.lyncode.xoai.dataprovider.xml.xoai.Element; import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; @@ -106,7 +107,8 @@ public static Element.Field createValue(String name, String value) { return e; } - private static Element createBundlesElement(Context context, Item item) throws SQLException { + private static Element createBundlesElement(Context context, Item item, AtomicBoolean restricted) + throws SQLException { Element bundles = create("bundles"); List bs; @@ -136,13 +138,24 @@ private static Element createBundlesElement(Context context, Item item) throws S // get handle of parent Item of this bitstream, if there // is one: List bn = bit.getBundles(); - if (!bn.isEmpty()) { + if (bn.size() > 0) { List bi = bn.get(0).getItems(); - if (!bi.isEmpty()) { + if (bi.size() > 0) { handle = bi.get(0).getHandle(); } } - url = baseUrl + "/bitstreams/" + bit.getID().toString() + "/download"; + if (bsName == null) { + List ext = bit.getFormat(context).getExtensions(); + bsName = "bitstream_" + sid + 
(ext.size() > 0 ? ext.get(0) : ""); + } + if (handle != null && baseUrl != null) { + url = baseUrl + "/bitstream/" + + handle + "/" + + sid + "/" + + URLUtils.encode(bsName); + } else { + url = URLUtils.encode(bsName); + } String cks = bit.getChecksum(); String cka = bit.getChecksumAlgorithm(); @@ -166,7 +179,17 @@ private static Element createBundlesElement(Context context, Item item) throws S bitstream.getField().add(createValue("checksumAlgorithm", cka)); bitstream.getField().add(createValue("sid", bit.getSequenceID() + "")); bitstream.getField().add(createValue("id", bit.getID().toString())); - + if (!restricted.get()) { + List clarinLicenseResourceMappingList = + clarinLicenseResourceMappingService.findByBitstreamUUID(context, bit.getID()); + for (ClarinLicenseResourceMapping clrm : clarinLicenseResourceMappingList) { + if (clrm.getLicense().getRequiredInfo() != null + && clrm.getLicense().getRequiredInfo().length() > 0) { + restricted.set( true); + break; + } + } + } } } @@ -293,100 +316,11 @@ public static Metadata retrieveMetadata(Context context, Item item) { // Now adding bitstream info //indicate restricted bitstreams -> restricted access - boolean restricted = false; + AtomicBoolean restricted = new AtomicBoolean(false); try { - Element bundles = createBundlesElement(context, item); + Element bundles = createBundlesElement(context, item, restricted); metadata.getElement().add(bundles); - List bs; - - bs = item.getBundles(); - for (Bundle b : bs) { - Element bundle = create("bundle"); - bundles.getElement().add(bundle); - bundle.getField() - .add(createValue("name", b.getName())); - - Element bitstreams = create("bitstreams"); - bundle.getElement().add(bitstreams); - List bits = b.getBitstreams(); - for (Bitstream bit : bits) { - Element bitstream = create("bitstream"); - bitstreams.getElement().add(bitstream); - String url = ""; - String bsName = bit.getName(); - String sid = String.valueOf(bit.getSequenceID()); - String baseUrl = 
configurationService.getProperty("oai", "bitstream.baseUrl"); - String handle = null; - // get handle of parent Item of this bitstream, if there - // is one: - List bn = bit.getBundles(); - if (bn.size() > 0) { - List bi = bn.get(0).getItems(); - if (bi.size() > 0) { - handle = bi.get(0).getHandle(); - } - } - if (bsName == null) { - List ext = bit.getFormat(context).getExtensions(); - bsName = "bitstream_" + sid - + (ext.size() > 0 ? ext.get(0) : ""); - } - if (handle != null && baseUrl != null) { - url = baseUrl + "/bitstream/" - + handle + "/" - + sid + "/" - + URLUtils.encode(bsName); - } else { - url = URLUtils.encode(bsName); - } - - String cks = bit.getChecksum(); - String cka = bit.getChecksumAlgorithm(); - String oname = bit.getSource(); - String name = bit.getName(); - String description = bit.getDescription(); - - if (name != null) { - bitstream.getField().add( - createValue("name", name)); - } - if (oname != null) { - bitstream.getField().add( - createValue("originalName", name)); - } - if (description != null) { - bitstream.getField().add( - createValue("description", description)); - } - bitstream.getField().add( - createValue("format", bit.getFormat(context) - .getMIMEType())); - bitstream.getField().add( - createValue("size", "" + bit.getSizeBytes())); - bitstream.getField().add(createValue("url", url)); - bitstream.getField().add( - createValue("checksum", cks)); - bitstream.getField().add( - createValue("checksumAlgorithm", cka)); - bitstream.getField().add( - createValue("sid", bit.getSequenceID() - + "")); - bitstream.getField().add( - createValue("id", bit.getID() + "")); - if (!restricted) { - List clarinLicenseResourceMappingList = - clarinLicenseResourceMappingService.findByBitstreamUUID(context, bit.getID()); - for (ClarinLicenseResourceMapping clrm : clarinLicenseResourceMappingList) { - if (clrm.getLicense().getRequiredInfo() != null - && clrm.getLicense().getRequiredInfo().length() > 0) { - restricted = true; - break; - } - } - } - } - } 
} catch (SQLException e) { log.warn(e.getMessage(), e); } @@ -398,7 +332,7 @@ public static Metadata retrieveMetadata(Context context, Item item) { other.getField().add(createValue("identifier", DSpaceItem.buildIdentifier(item.getHandle()))); other.getField().add(createValue("lastModifyDate", item.getLastModified().toString())); - if (restricted) { + if (restricted.get()) { other.getField().add(createValue("restrictedAccess", "true")); } // Because we reindex Solr, which is not done in vanilla From 1e2b8ef6b55caaf2844c5dfa47886408b8acfb06 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Fri, 20 Sep 2024 08:19:44 +0200 Subject: [PATCH 02/45] Ufal/Preview issues (#764) * Ensure the content preview doesn't overload maximum length of the column in the database. And encode the input stream into UTF-8. * Do not store HTML content in the database because it could be longer than the limit of the database column. --- .../MetadataBitstreamRestRepository.java | 61 +++++++++++++++---- 1 file changed, 50 insertions(+), 11 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataBitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataBitstreamRestRepository.java index 3eb2ee46185e..c53717306fcc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataBitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataBitstreamRestRepository.java @@ -12,6 +12,7 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Files; @@ -82,6 +83,10 @@ public class MetadataBitstreamRestRepository extends DSpaceRestRepository { private static Logger log = 
org.apache.logging.log4j.LogManager.getLogger(MetadataBitstreamRestRepository.class); + // This constant is used to limit the length of the preview content stored in the database to prevent + // the database from being overloaded with large amounts of data. + private static final int MAX_PREVIEW_COUNT_LENGTH = 2000; + @Autowired HandleService handleService; @@ -170,8 +175,12 @@ public Page findByHandle(@Parameter(value = "handl // Generate new content if we didn't find any if (prContents.isEmpty()) { fileInfos = getFilePreviewContent(context, bitstream, fileInfos); - for (FileInfo fi : fileInfos) { - createPreviewContent(context, bitstream, fi); + // Do not store HTML content in the database because it could be longer than the limit + // of the database column + if (!StringUtils.equals("text/html", bitstream.getFormat(context).getMIMEType())) { + for (FileInfo fi : fileInfos) { + createPreviewContent(context, bitstream, fi); + } } } else { for (PreviewContent pc : prContents) { @@ -311,8 +320,11 @@ private List processInputStreamToFilePreview(Context context, Bitstrea List fileInfos, InputStream inputStream) throws IOException, SQLException, ParserConfigurationException, SAXException, ArchiveException { String bitstreamMimeType = bitstream.getFormat(context).getMIMEType(); - if (bitstreamMimeType.equals("text/plain") || bitstreamMimeType.equals("text/html")) { - String data = getFileContent(inputStream); + if (bitstreamMimeType.equals("text/plain")) { + String data = getFileContent(inputStream, true); + fileInfos.add(new FileInfo(data, false)); + } else if (bitstreamMimeType.equals("text/html")) { + String data = getFileContent(inputStream, false); fileInfos.add(new FileInfo(data, false)); } else { String data = ""; @@ -462,17 +474,44 @@ public String extractFile(InputStream inputStream, String fileType) { * @return content of the inputStream as a String * @throws IOException */ - public String getFileContent(InputStream inputStream) throws IOException { + public 
String getFileContent(InputStream inputStream, boolean cutResult) throws IOException { StringBuilder content = new StringBuilder(); - BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); - - String line; - while ((line = reader.readLine()) != null) { - content.append(line).append("\n"); + // Generate the preview content in the UTF-8 encoding + BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); + try { + String line; + while ((line = reader.readLine()) != null) { + content.append(line).append("\n"); + } + } catch (UnsupportedEncodingException e) { + log.error("UnsupportedEncodingException during creating the preview content because: ", e); + } catch (IOException e) { + log.error("IOException during creating the preview content because: ", e); } reader.close(); - return content.toString(); + return cutResult ? ensureMaxLength(content.toString()) : content.toString(); + } + + /** + * Trims the input string to ensure it does not exceed the maximum length for the database column. + * @param input The original string to be trimmed. + * @return A string that is truncated to the maximum length if necessary. + */ + private static String ensureMaxLength(String input) { + if (input == null) { + return null; + } + + // Check if the input string exceeds the maximum preview length + if (input.length() > MAX_PREVIEW_COUNT_LENGTH) { + // Truncate the string and append " . . ." + int previewLength = MAX_PREVIEW_COUNT_LENGTH - 6; // Subtract length of " . . ." + return input.substring(0, previewLength) + " . . 
."; + } else { + // Return the input string as is if it's within the preview length + return input; + } } /** From 4f579e1ad6bc26c848b7ffeffdfec347aa920700 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Fri, 20 Sep 2024 08:21:37 +0200 Subject: [PATCH 03/45] UFAL/Encoded the UTF-8 characters from the redirect URL to UTF (#758) * Encoded the UTF-8 characters from the redirect URL to UTF * Moved ClarinUtils into Utils class * Added a new `dq` package into ComponentScan * Moved dq.Utils into DSpace utils.Utils because the components with the same name causes conflicts. * Removed *.dq component scan from the App --- .../ClarinDiscoJuiceFeedsDownloadService.java | 4 +- .../clarin/ClarinShibbolethLoginFilter.java | 7 ++- .../dspace/app/rest/utils/ClarinUtils.java | 63 ------------------- .../java/org/dspace/app/rest/utils/Utils.java | 57 +++++++++++++++++ .../ClarinDiscoJuiceFeedsControllerIT.java | 4 +- 5 files changed, 67 insertions(+), 68 deletions(-) delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ClarinUtils.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsDownloadService.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsDownloadService.java index 7fdd9a9ade54..73b5d2b3dfef 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsDownloadService.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsDownloadService.java @@ -34,7 +34,7 @@ import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.app.rest.utils.ClarinUtils; +import org.dspace.app.rest.utils.Utils; import org.dspace.services.ConfigurationService; import org.dspace.utils.DSpace; import org.json.simple.JSONArray; @@ -237,7 +237,7 @@ private static JSONArray downloadJSON(String url) { 
conn.setReadTimeout(10000); // Disable SSL certificate validation if (disableSSL && conn instanceof HttpsURLConnection) { - ClarinUtils.disableCertificateValidation((HttpsURLConnection) conn); + Utils.disableCertificateValidation((HttpsURLConnection) conn); } //Caution does not follow redirects, and even if you set it to http->https is not possible Object obj = parser.parse(new InputStreamReader(conn.getInputStream())); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java index 02ecaa593a90..78887d5f5e58 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java @@ -307,7 +307,12 @@ private void redirectAfterSuccess(HttpServletRequest request, HttpServletRespons if (StringUtils.equalsAnyIgnoreCase(redirectHostName, allowedHostNames.toArray(new String[0]))) { log.debug("Shibboleth redirecting to " + redirectUrl); - response.sendRedirect(redirectUrl); + // Encode the UTF-8 characters from redirect URL to UTF-8, to ensure it's properly encoded for the browser + String encodedRedirectUrl = org.dspace.app.rest.utils.Utils.encodeNonAsciiCharacters(redirectUrl); + if (StringUtils.isEmpty(encodedRedirectUrl)) { + log.error("Invalid Encoded Shibboleth redirectURL=" + redirectUrl + ". URL is empty!"); + } + response.sendRedirect(encodedRedirectUrl); } else { log.error("Invalid Shibboleth redirectURL=" + redirectUrl + ". 
URL doesn't match hostname of server or UI!"); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ClarinUtils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ClarinUtils.java deleted file mode 100644 index 2a93f5793205..000000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/ClarinUtils.java +++ /dev/null @@ -1,63 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.rest.utils; - -import java.security.KeyManagementException; -import java.security.NoSuchAlgorithmException; -import java.security.SecureRandom; -import javax.net.ssl.HttpsURLConnection; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManager; -import javax.net.ssl.X509TrustManager; - -import org.springframework.stereotype.Component; - -/** - * Collection of utility methods for clarin customized operations - * - * @author Milan Majchrak (dspace at dataquest.sk) - */ -@Component -public class ClarinUtils { - - private ClarinUtils() { - } - - /** - * Disables SSL certificate validation for the given connection - * - * @param connection - */ - public static void disableCertificateValidation(HttpsURLConnection connection) { - try { - // Create a TrustManager that trusts all certificates - TrustManager[] trustAllCerts = { new X509TrustManager() { - public java.security.cert.X509Certificate[] getAcceptedIssuers() { - return null; - } - - public void checkClientTrusted(java.security.cert.X509Certificate[] certs, String authType) { - } - - public void checkServerTrusted(java.security.cert.X509Certificate[] certs, String authType) { - } } - }; - - // Install the TrustManager - SSLContext sslContext = SSLContext.getInstance("SSL"); - sslContext.init(null, trustAllCerts, new SecureRandom()); - 
connection.setSSLSocketFactory(sslContext.getSocketFactory()); - - // Set a HostnameVerifier that accepts all hostnames - connection.setHostnameVerifier((hostname, session) -> true); - - } catch (NoSuchAlgorithmException | KeyManagementException e) { - throw new RuntimeException("Error disabling SSL certificate validation", e); - } - } -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java index ed6e26ed0fb7..347a23b86de5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java @@ -29,6 +29,11 @@ import java.net.MalformedURLException; import java.net.URL; import java.net.URLDecoder; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; @@ -44,6 +49,10 @@ import java.util.TreeSet; import java.util.UUID; import javax.annotation.Nullable; +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; import javax.servlet.ServletRequest; import javax.servlet.http.HttpServletRequest; @@ -1076,4 +1085,52 @@ private BaseObjectRest findBaseObjectRest(Context context, String apiCategory, S context.restoreAuthSystemState(); } } + + /** + * Disables SSL certificate validation for the given connection + * + * @param connection + */ + public static void disableCertificateValidation(HttpsURLConnection connection) { + try { + // Create a TrustManager that trusts all certificates + TrustManager[] trustAllCerts = { new X509TrustManager() { + public java.security.cert.X509Certificate[] getAcceptedIssuers() { + return null; + } + + public void 
checkClientTrusted(java.security.cert.X509Certificate[] certs, String authType) { + } + + public void checkServerTrusted(java.security.cert.X509Certificate[] certs, String authType) { + } } + }; + + // Install the TrustManager + SSLContext sslContext = SSLContext.getInstance("SSL"); + sslContext.init(null, trustAllCerts, new SecureRandom()); + connection.setSSLSocketFactory(sslContext.getSocketFactory()); + + // Set a HostnameVerifier that accepts all hostnames + connection.setHostnameVerifier((hostname, session) -> true); + + } catch (NoSuchAlgorithmException | KeyManagementException e) { + throw new RuntimeException("Error disabling SSL certificate validation", e); + } + } + + /** + * Function to encode only non-ASCII characters + */ + public static String encodeNonAsciiCharacters(String input) { + StringBuilder result = new StringBuilder(); + for (char ch : input.toCharArray()) { + if (!StringUtils.isAsciiPrintable(String.valueOf(ch))) { // Use Apache Commons method + result.append(URLEncoder.encode(String.valueOf(ch), StandardCharsets.UTF_8)); + } else { + result.append(ch); // Leave ASCII characters intact + } + } + return result.toString(); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsControllerIT.java index 0075011fd0bf..d20298ac9116 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoJuiceFeedsControllerIT.java @@ -22,7 +22,7 @@ import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; -import org.dspace.app.rest.utils.ClarinUtils; +import org.dspace.app.rest.utils.Utils; import org.dspace.services.ConfigurationService; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; @@ -64,7 +64,7 @@ public void 
testDiscoFeedURL() throws Exception { // Disable SSL certificate validation if (disableSSL && conn instanceof HttpsURLConnection) { - ClarinUtils.disableCertificateValidation((HttpsURLConnection) conn); + Utils.disableCertificateValidation((HttpsURLConnection) conn); } Object obj = parser.parse(new InputStreamReader(conn.getInputStream())); From 140f0b72b2b79ee236e2c2b29ad1e981080a9e9e Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Tue, 24 Sep 2024 16:55:27 +0200 Subject: [PATCH 04/45] Merge pull request #9790 from DSpace/backport-9775-to-dspace-7_x (#769) [Port dspace-7_x] Make statistics autocommit much more frequently Co-authored-by: Tim Donohue --- dspace/solr/statistics/conf/solrconfig.xml | 25 ++++++++++++++-------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/dspace/solr/statistics/conf/solrconfig.xml b/dspace/solr/statistics/conf/solrconfig.xml index 2b1cff45373d..c3f023ff2eee 100644 --- a/dspace/solr/statistics/conf/solrconfig.xml +++ b/dspace/solr/statistics/conf/solrconfig.xml @@ -32,14 +32,16 @@ - + 32 1000 ${solr.lock.type:native} - + false @@ -48,7 +50,7 @@ 10000 - ${solr.autoCommit.maxTime:900000} + ${solr.autoCommit.maxTime:10000} true @@ -62,14 +64,16 @@ ${solr.max.booleanClauses:1024} + unordered sets of *all* documents that match a + query. Caches results of 'fq' search param. 
--> - + 1000 - + - + uuid @@ -126,7 +132,8 @@ - + uid From dd24d2b891e0974de2987def9ef91d4adf5d5f15 Mon Sep 17 00:00:00 2001 From: Paurikova2 <107862249+Paurikova2@users.noreply.github.com> Date: Thu, 10 Oct 2024 14:18:06 +0200 Subject: [PATCH 05/45] test for bitstream with null value of sizebytes --- .../rest/ClarinBitstreamImportController.java | 8 +++-- .../ClarinBitstreamImportControllerIT.java | 35 +++++++++++++++++++ 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinBitstreamImportController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinBitstreamImportController.java index 63380a756c2f..1620cb0609fc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinBitstreamImportController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinBitstreamImportController.java @@ -137,7 +137,11 @@ public BitstreamRest importBitstreamForExistingFile(HttpServletRequest request) Boolean deleted = Boolean.parseBoolean(deletedString); //set size bytes - bitstream.setSizeBytes(bitstreamRest.getSizeBytes()); + if (Objects.nonNull(bitstreamRest.getSizeBytes())) { + bitstream.setSizeBytes(bitstreamRest.getSizeBytes()); + } else { + log.info("SizeBytes is null. 
Bitstream UUID: " + bitstream.getID()); + } //set checksum bitstream.setChecksum(bitstreamRest.getCheckSum().getValue()); //set checksum algorithm @@ -206,7 +210,7 @@ public BitstreamRest importBitstreamForExistingFile(HttpServletRequest request) message += " for bundle with uuid: " + bundle.getID(); } log.error(message, e); - throw new RuntimeException("message", e); + throw new RuntimeException(message, e); } return bitstreamRest; } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinBitstreamImportControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinBitstreamImportControllerIT.java index 9e0c0f339a7c..74ad6b65e687 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinBitstreamImportControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinBitstreamImportControllerIT.java @@ -270,6 +270,41 @@ public void importBitstreamForExistingFileValidationErrorTest() throws Exception assertEquals(bitstreamService.findAll(context).size(), 0); } + @Test + public void importDeletedBitstreamTest() throws Exception { + //input data + ObjectNode checksumNode = jsonNodeFactory.objectNode(); + checksumNode.set("checkSumAlgorithm", null); + checksumNode.set("value", null); + ObjectNode node = jsonNodeFactory.objectNode(); + node.set("sizeBytes", null); + node.set("checkSum", checksumNode); + + //create new bitstream for existing file + ObjectMapper mapper = new ObjectMapper(); + uuid = UUID.fromString(read( getClient(token).perform(post("/api/clarin/import/core/bitstream") + .content(mapper.writeValueAsBytes(node)) + .contentType(contentType) + .param("internal_id", internalId) + .param("storeNumber", "0") + .param("deleted", "true")) + .andExpect(status().isOk()) + .andReturn().getResponse().getContentAsString(), + "$.id")); + + bitstream = bitstreamService.find(context, uuid); + assertEquals(bitstream.getSizeBytes(), 0); + assertEquals(bitstream.getInternalId(), internalId); + 
assertEquals(bitstream.getStoreNumber(), 0); + assertEquals(bitstream.getSequenceID(), -1); + assertEquals(bitstream.isDeleted(), true); + + //clean all + context.turnOffAuthorisationSystem(); + BitstreamBuilder.deleteBitstream(uuid); + context.restoreAuthSystemState(); + } + private void checkCreatedBitstream(UUID uuid, String internalId, int storeNumber, String bitstreamFormat, int sequence, boolean deleted, long sizeBytes, String checkSum) throws SQLException { From 9386c0e76b5ffeb57d9d0eb8e5638d3a56ef6eeb Mon Sep 17 00:00:00 2001 From: Jozef Misutka <332350+vidiecan@users.noreply.github.com> Date: Tue, 15 Oct 2024 14:57:53 +0200 Subject: [PATCH 06/45] Update README.md --- README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 51c09fd677f7..abbe997c7351 100644 --- a/README.md +++ b/README.md @@ -128,4 +128,7 @@ The full license is available in the [LICENSE](LICENSE) file or online at http:/ DSpace uses third-party libraries which may be distributed under different licenses. Those licenses are listed in the [LICENSES_THIRD_PARTY](LICENSES_THIRD_PARTY) file. - \ No newline at end of file + +# Additional notes + +This project is tested with BrowserStack. From 0659305f58bbd8dc7676344bfb0fd34840a264f8 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Wed, 16 Oct 2024 10:13:21 +0200 Subject: [PATCH 07/45] UFAL/Shibboleth - load more net-id headers e.g. persistent-id (#772) * Load netid from more than one header. authentication-shibboleth.netid-header could be list, not only single value * Shibboleth login - sort the emails passed in the shibboleth email header and get the first one. * The user is redirected to the login page when it is trying to update eperson email which is already assigned to another eperson. 
* Sorting emails is moved into specific method and ShibbolethLoginFilter is updated following the ShibAuthentication changes * Fixed failing tests * The ClarinShibbolethLoginFilter and ClarinShibAuthentication has duplicate code, I put it into static method. --- .../clarin/ClarinShibAuthentication.java | 151 +++++++----- .../authenticate/clarin/ShibHeaders.java | 11 +- .../test/data/dspaceFolder/config/local.cfg | 2 +- .../clarin/ClarinShibbolethLoginFilter.java | 41 ++-- .../ClarinShibbolethLoginFilterIT.java | 229 +++++++++++++----- .../modules/authentication-shibboleth.cfg | 2 +- 6 files changed, 288 insertions(+), 148 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/clarin/ClarinShibAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/clarin/ClarinShibAuthentication.java index 822543d08c80..98c030be80c7 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/clarin/ClarinShibAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/clarin/ClarinShibAuthentication.java @@ -11,6 +11,7 @@ import java.net.URLEncoder; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; @@ -19,13 +20,13 @@ import java.util.Objects; import java.util.UUID; import java.util.regex.Pattern; +import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.dspace.app.util.Util; import org.dspace.authenticate.AuthenticationMethod; import org.dspace.authenticate.factory.AuthenticateServiceFactory; import org.dspace.authorize.AuthorizeException; @@ -247,15 +248,16 @@ public int authenticate(Context context, String username, String password, // Should we auto register new users. 
boolean autoRegister = configurationService.getBooleanProperty("authentication-shibboleth.autoregister", true); + String[] netidHeaders = configurationService.getArrayProperty("authentication-shibboleth.netid-header"); // Four steps to authenticate a user try { // Step 1: Identify User - EPerson eperson = findEPerson(context, request); + EPerson eperson = findEPerson(context, request, netidHeaders); // Step 2: Register New User, if necessary if (eperson == null && autoRegister && !isDuplicateUser) { - eperson = registerNewEPerson(context, request); + eperson = registerNewEPerson(context, request, netidHeaders); } if (eperson == null) { @@ -263,7 +265,7 @@ public int authenticate(Context context, String username, String password, } // Step 3: Update User's Metadata - updateEPerson(context, request, eperson); + updateEPerson(context, request, eperson, netidHeaders); // Step 4: Log the user in. context.setCurrentUser(eperson); @@ -540,11 +542,11 @@ public static boolean isEnabled() { * @throws SQLException if database error * @throws AuthorizeException if authorization error */ - protected EPerson findEPerson(Context context, HttpServletRequest request) throws SQLException, AuthorizeException { + protected EPerson findEPerson(Context context, HttpServletRequest request, String[] netidHeaders) + throws SQLException { boolean isUsingTomcatUser = configurationService .getBooleanProperty("authentication-shibboleth.email-use-tomcat-remote-user"); - String netidHeader = configurationService.getProperty("authentication-shibboleth.netid-header"); String emailHeader = configurationService.getProperty("authentication-shibboleth.email-header"); EPerson eperson = null; @@ -554,26 +556,10 @@ protected EPerson findEPerson(Context context, HttpServletRequest request) throw // 1) First, look for a netid header. 
- if (netidHeader != null) { - String org = shibheaders.get_idp(); - String netid = Util.formatNetId(findSingleAttribute(request, netidHeader), org); - if (StringUtils.isEmpty(netid)) { - netid = shibheaders.get_single(netidHeader); - } - - if (netid != null) { + if (netidHeaders != null) { + eperson = findEpersonByNetId(netidHeaders, shibheaders, eperson, ePersonService, context, true); + if (eperson != null) { foundNetID = true; - eperson = ePersonService.findByNetid(context, netid); - - if (eperson == null) { - log.info( - "Unable to identify EPerson based upon Shibboleth netid header: '" + netidHeader + "'='" + - netid + "'."); - } else { - log.debug( - "Identified EPerson based upon Shibboleth netid header: '" + netidHeader + "'='" + - netid + "'" + "."); - } } } @@ -656,7 +642,6 @@ protected EPerson findEPerson(Context context, HttpServletRequest request) throw return eperson; } - /** * Register a new eperson object. This method is called when no existing user was * found for the NetID or Email and autoregister is enabled. 
When these conditions @@ -677,11 +662,10 @@ protected EPerson findEPerson(Context context, HttpServletRequest request) throw * @throws SQLException if database error * @throws AuthorizeException if authorization error */ - protected EPerson registerNewEPerson(Context context, HttpServletRequest request) + protected EPerson registerNewEPerson(Context context, HttpServletRequest request, String[] netidHeaders) throws SQLException, AuthorizeException { // Header names - String netidHeader = configurationService.getProperty("authentication-shibboleth.netid-header"); String emailHeader = configurationService.getProperty("authentication-shibboleth.email-header"); String fnameHeader = configurationService.getProperty("authentication-shibboleth.firstname-header"); String lnameHeader = configurationService.getProperty("authentication-shibboleth.lastname-header"); @@ -694,15 +678,12 @@ protected EPerson registerNewEPerson(Context context, HttpServletRequest request // CLARIN // Header values - String netid = Util.formatNetId(findSingleAttribute(request, netidHeader), org); + String netid = getFirstNetId(netidHeaders); String email = getEmailAcceptedOrNull(findSingleAttribute(request, emailHeader)); String fname = Headers.updateValueByCharset(findSingleAttribute(request, fnameHeader)); String lname = Headers.updateValueByCharset(findSingleAttribute(request, lnameHeader)); // If the values are not in the request headers try to retrieve it from `shibheaders`. - if (StringUtils.isEmpty(netid)) { - netid = shibheaders.get_single(netidHeader); - } if (StringUtils.isEmpty(email) && Objects.nonNull(clarinVerificationToken)) { email = clarinVerificationToken.getEmail(); } @@ -718,7 +699,7 @@ protected EPerson registerNewEPerson(Context context, HttpServletRequest request // don't have at least these three pieces of information then we fail. 
String message = "Unable to register new eperson because we are unable to find an email address along " + "with first and last name for the user.\n"; - message += " NetId Header: '" + netidHeader + "'='" + netid + "' (Optional) \n"; + message += " NetId Header: '" + Arrays.toString(netidHeaders) + "'='" + netid + "' (Optional) \n"; message += " Email Header: '" + emailHeader + "'='" + email + "' \n"; message += " First Name Header: '" + fnameHeader + "'='" + fname + "' \n"; message += " Last Name Header: '" + lnameHeader + "'='" + lname + "'"; @@ -807,24 +788,20 @@ protected EPerson registerNewEPerson(Context context, HttpServletRequest request * @throws SQLException if database error * @throws AuthorizeException if authorization error */ - protected void updateEPerson(Context context, HttpServletRequest request, EPerson eperson) + protected void updateEPerson(Context context, HttpServletRequest request, EPerson eperson, String[] netidHeaders) throws SQLException, AuthorizeException { // Header names & values - String netidHeader = configurationService.getProperty("authentication-shibboleth.netid-header"); String emailHeader = configurationService.getProperty("authentication-shibboleth.email-header"); String fnameHeader = configurationService.getProperty("authentication-shibboleth.firstname-header"); String lnameHeader = configurationService.getProperty("authentication-shibboleth.lastname-header"); - String netid = Util.formatNetId(findSingleAttribute(request, netidHeader), shibheaders.get_idp()); + String netid = getFirstNetId(netidHeaders); String email = getEmailAcceptedOrNull(findSingleAttribute(request, emailHeader)); String fname = Headers.updateValueByCharset(findSingleAttribute(request, fnameHeader)); String lname = Headers.updateValueByCharset(findSingleAttribute(request, lnameHeader)); // If the values are not in the request headers try to retrieve it from `shibheaders`. 
- if (StringUtils.isEmpty(netid)) { - netid = shibheaders.get_single(netidHeader); - } if (StringUtils.isEmpty(email) && Objects.nonNull(clarinVerificationToken)) { email = clarinVerificationToken.getEmail(); } @@ -858,7 +835,16 @@ protected void updateEPerson(Context context, HttpServletRequest request, EPerso } // The email could have changed if using netid based lookup. if (email != null) { - eperson.setEmail(email.toLowerCase()); + String lowerCaseEmail = email.toLowerCase(); + // Check the email is unique + EPerson epersonByEmail = ePersonService.findByEmail(context, lowerCaseEmail); + if (epersonByEmail != null && !epersonByEmail.getID().equals(eperson.getID())) { + log.error("Unable to update the eperson's email metadata because the email '{}' is already in use.", + lowerCaseEmail); + throw new AuthorizeException("The email address is already in use."); + } else { + eperson.setEmail(email.toLowerCase()); + } } if (fname != null) { eperson.setFirstName(context, fname); @@ -1207,29 +1193,11 @@ public String findSingleAttribute(HttpServletRequest request, String name) { if (name == null) { return null; } - String value = findAttribute(request, name); - if (value != null) { - // If there are multiple values encoded in the shibboleth attribute - // they are separated by a semicolon, and any semicolons in the - // attribute are escaped with a backslash. For this case we are just - // looking for the first attribute so we scan the value until we find - // the first unescaped semicolon and chop off everything else. 
- int idx = 0; - do { - idx = value.indexOf(';', idx); - if (idx != -1 && value.charAt(idx - 1) != '\\') { - value = value.substring(0, idx); - break; - } - } while (idx >= 0); - - // Unescape the semicolon after splitting - value = value.replaceAll("\\;", ";"); + value = sortEmailsAndGetFirst(value); } - return value; } @@ -1338,5 +1306,70 @@ public String getEmailAcceptedOrNull(String email) { } return email; } + + /** + * Find an EPerson by a NetID header. The method will go through all the netid headers and try to find a user. + */ + public static EPerson findEpersonByNetId(String[] netidHeaders, ShibHeaders shibheaders, EPerson eperson, + EPersonService ePersonService, Context context, boolean logAllowed) + throws SQLException { + // Go through all the netid headers and try to find a user. It could be e.g., `eppn`, `persistent-id`,.. + for (String netidHeader : netidHeaders) { + netidHeader = netidHeader.trim(); + String netid = shibheaders.get_single(netidHeader); + if (netid == null) { + continue; + } + + eperson = ePersonService.findByNetid(context, netid); + + if (eperson == null && logAllowed) { + log.info( + "Unable to identify EPerson based upon Shibboleth netid header: '" + netidHeader + + "'='" + netid + "'."); + } else { + log.debug( + "Identified EPerson based upon Shibboleth netid header: '" + netidHeader + "'='" + + netid + "'" + "."); + } + } + return eperson; + } + + /** + * Sort the email addresses and return the first one. + * @param value The email addresses separated by semicolons. + */ + public static String sortEmailsAndGetFirst(String value) { + // If there are multiple values encoded in the shibboleth attribute + // they are separated by a semicolon, and any semicolons in the + // attribute are escaped with a backslash. + // Step 1: Split the input string into email addresses + List emails = Arrays.stream(value.split("(? 
email.replaceAll("\\\\;", ";")) // Unescape semicolons + .collect(Collectors.toList()); + + // Step 2: Sort the email list alphabetically + emails.sort(String::compareToIgnoreCase); + + // Step 3: Get the first sorted email + return emails.get(0); + } + + /** + * Get the first netid from the list of netid headers. E.g., eppn, persistent-id,... + * @param netidHeaders list of netid headers loaded from the configuration `authentication-shibboleth.netid-header` + */ + public String getFirstNetId(String[] netidHeaders) { + for (String netidHeader : netidHeaders) { + netidHeader = netidHeader.trim(); + String netid = shibheaders.get_single(netidHeader); + if (netid != null) { + //When creating use first match (eppn before targeted-id) + return netid; + } + } + return null; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/clarin/ShibHeaders.java b/dspace-api/src/main/java/org/dspace/authenticate/clarin/ShibHeaders.java index 65897087302e..a4b85e53be2e 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/clarin/ShibHeaders.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/clarin/ShibHeaders.java @@ -14,6 +14,7 @@ import java.util.Map; import javax.servlet.http.HttpServletRequest; +import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -32,7 +33,7 @@ public class ShibHeaders { // constants // private static final String header_separator_ = ";"; - private String netIdHeader = ""; + private String[] netIdHeaders = null; ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); // variables @@ -105,7 +106,7 @@ public String get_single(String name) { List values = get(name); if (values != null && !values.isEmpty()) { // Format netId - if (StringUtils.equals(name, this.netIdHeader)) { + if (ArrayUtils.contains(this.netIdHeaders, name)) { return Util.formatNetId(values.get(0), 
this.get_idp()); } return values.get(0); @@ -150,6 +151,10 @@ public void log_headers() { } private void initializeNetIdHeader() { - this.netIdHeader = configurationService.getProperty("authentication-shibboleth.netid-header"); + this.netIdHeaders = configurationService.getArrayProperty("authentication-shibboleth.netid-header"); + } + + public String[] getNetIdHeaders() { + return this.netIdHeaders; } } diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index cc9ccb26bbb2..1027d5bd93b7 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -219,7 +219,7 @@ featured.service.teitok.description = A web-based platform for viewing, creating ##### Shibboleth ##### -authentication-shibboleth.netid-header = SHIB-NETID +authentication-shibboleth.netid-header = SHIB-NETID,eppn,persistent-id authentication-shibboleth.email-header = SHIB-MAIL authentication-shibboleth.firstname-header = SHIB-GIVENNAME authentication-shibboleth.lastname-header = SHIB-SURNAME diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java index 78887d5f5e58..821be38ed2a2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java @@ -126,7 +126,6 @@ public Authentication attemptAuthentication(HttpServletRequest req, // If the Idp doesn't send the email in the request header, send the redirect order to the FE for the user // to fill in the email. 
- String netidHeader = configurationService.getProperty("authentication-shibboleth.netid-header"); String emailHeader = configurationService.getProperty("authentication-shibboleth.email-header"); Context context = ContextUtil.obtainContext(req); @@ -154,34 +153,34 @@ public Authentication attemptAuthentication(HttpServletRequest req, shib_headers = new ShibHeaders(req); } - // Retrieve the netid and email values from the header. - String netid = shib_headers.get_single(netidHeader); String idp = shib_headers.get_idp(); // If the clarin verification object is not null load the email from there otherwise from header. - String email = Objects.isNull(clarinVerificationToken) ? - shib_headers.get_single(emailHeader) : clarinVerificationToken.getEmail(); + String email; + if (Objects.isNull(clarinVerificationToken)) { + email = shib_headers.get_single(emailHeader); + if (StringUtils.isNotEmpty(email)) { + email = ClarinShibAuthentication.sortEmailsAndGetFirst(email); + } + } else { + email = clarinVerificationToken.getEmail(); + } EPerson ePerson = null; - if (StringUtils.isNotEmpty(netid)) { + try { + ePerson = ClarinShibAuthentication.findEpersonByNetId(shib_headers.getNetIdHeaders(), shib_headers, ePerson, + ePersonService, context, false); + } catch (SQLException e) { + // It is logged in the ClarinShibAuthentication class. + } + + if (Objects.isNull(ePerson) && StringUtils.isNotEmpty(email)) { try { - // If email is null and netid exist try to find the eperson by netid and load its email - if (StringUtils.isEmpty(email)) { - ePerson = ePersonService.findByNetid(context, netid); - email = Objects.isNull(email) ? 
this.getEpersonEmail(ePerson) : null; - } else { - // Try to get user by the email because of possible duplicate of the user email - ePerson = ePersonService.findByEmail(context, email); - } - } catch (SQLException ignored) { - // + ePerson = ePersonService.findByEmail(context, email); + } catch (SQLException e) { + // It is logged in the ClarinShibAuthentication class. } } - // logging - log.info("Shib-Identity-Provider: " + idp); - log.info("authentication-shibboleth.netid-header: " + netidHeader + " with value: " + netid); - log.info("authentication-shibboleth.email-header: " + emailHeader + " with value: " + email); - try { if (StringUtils.isEmpty(idp)) { log.error("Cannot load the idp from the request headers."); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/security/ClarinShibbolethLoginFilterIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/security/ClarinShibbolethLoginFilterIT.java index 8b62e95bed79..343f0c5fe035 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/security/ClarinShibbolethLoginFilterIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/security/ClarinShibbolethLoginFilterIT.java @@ -9,6 +9,7 @@ import static org.dspace.app.rest.security.ShibbolethLoginFilterIT.PASS_ONLY; import static org.dspace.app.rest.security.clarin.ClarinShibbolethLoginFilter.VERIFICATION_TOKEN_HEADER; +import static org.dspace.rdf.negotiation.MediaRange.token; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -19,6 +20,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -53,8 +55,12 @@ public class ClarinShibbolethLoginFilterIT extends AbstractControllerIntegrationTest { public 
static final String[] SHIB_ONLY = {"org.dspace.authenticate.clarin.ClarinShibAuthentication"}; + private static final String NET_ID_EPPN_HEADER = "eppn"; + private static final String NET_ID_PERSISTENT_ID = "persistent-id"; private static final String NET_ID_TEST_EPERSON = "123456789"; private static final String IDP_TEST_EPERSON = "Test Idp"; + private static final String KNIHOVNA_KUN_TEST_ZLUTOUCKY = "knihovna Kůň test Žluťoučký"; + private EPersonRest ePersonRest; private final String feature = CanChangePasswordFeature.NAME; @@ -183,10 +189,7 @@ public void userFillInEmailAndShouldBeRegisteredByVerificationToken() throws Exc .andExpect(status().isOk()); // Check if was created a user with such email and netid. - EPerson ePerson = ePersonService.findByNetid(context, Util.formatNetId(netId, idp)); - assertTrue(Objects.nonNull(ePerson)); - assertEquals(ePerson.getEmail(), email); - assertEquals(ePerson.getNetid(), Util.formatNetId(netId, idp)); + EPerson ePerson = checkUserWasCreated(netId, idp, email, null); // The user is registered now log him getClient().perform(post("/api/authn/shibboleth") @@ -207,7 +210,7 @@ public void userFillInEmailAndShouldBeRegisteredByVerificationToken() throws Exc .andExpect(status().isFound()); // Delete created eperson - clean after the test - EPersonBuilder.deleteEPerson(ePerson.getID()); + deleteShibbolethUser(ePerson); } @Test @@ -226,11 +229,7 @@ public void testShouldReturnDuplicateUserError() throws Exception { .andExpect(redirectedUrl("http://localhost:4000")) .andReturn().getResponse().getHeader("Authorization"); - - getClient(token).perform(get("/api/authn/status")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.authenticated", is(true))) - .andExpect(jsonPath("$.authenticationMethod", is("shibboleth"))); + checkUserIsSignedIn(token); // Check if was created a user with such email and netid. 
EPerson ePerson = ePersonService.findByNetid(context, Util.formatNetId(netId, IDP_TEST_EPERSON)); @@ -252,6 +251,33 @@ public void testShouldReturnDuplicateUserError() throws Exception { EPersonBuilder.deleteEPerson(ePerson.getID()); } + // Login with email without netid, but the user with such email already exists and it has assigned netid. + @Test + public void testShouldReturnDuplicateUserErrorLoginWithoutNetId() throws Exception { + String email = "test@email.sk"; + String netId = email; + + // login through shibboleth + String token = getClient().perform(get("/api/authn/shibboleth") + .header("SHIB-MAIL", email) + .header("SHIB-NETID", netId) + .header("Shib-Identity-Provider", IDP_TEST_EPERSON)) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("http://localhost:4000")) + .andReturn().getResponse().getHeader("Authorization"); + + checkUserIsSignedIn(token); + + // Should not login because the user with such email already exists + getClient().perform(get("/api/authn/shibboleth") + .header("SHIB-MAIL", email) + .header("Shib-Identity-Provider", IDP_TEST_EPERSON)) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("http://localhost:4000/login/duplicate-user?email=" + email)) + .andReturn().getResponse().getHeader("Authorization"); + + } + // This test is copied from the `ShibbolethLoginFilterIT` and modified following the Clarin updates. 
@Test public void testRedirectToGivenTrustedUrl() throws Exception { @@ -264,10 +290,7 @@ public void testRedirectToGivenTrustedUrl() throws Exception { .andExpect(redirectedUrl("http://localhost:8080/server/api/authn/status")) .andReturn().getResponse().getHeader("Authorization"); - getClient(token).perform(get("/api/authn/status")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.authenticated", is(true))) - .andExpect(jsonPath("$.authenticationMethod", is("shibboleth"))); + checkUserIsSignedIn(token); getClient(token).perform( get("/api/authz/authorizations/search/object") @@ -299,11 +322,7 @@ public void patchPassword() throws Exception { .andExpect(redirectedUrl("http://localhost:4000")) .andReturn().getResponse().getHeader("Authorization"); - - getClient(token).perform(get("/api/authn/status")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.authenticated", is(true))) - .andExpect(jsonPath("$.authenticationMethod", is("shibboleth"))); + checkUserIsSignedIn(token); // updates password getClient(token).perform(patch("/api/eperson/epersons/" + clarinEperson.getID()) @@ -328,11 +347,7 @@ public void testRedirectToDefaultDspaceUrl() throws Exception { .andExpect(redirectedUrl("http://localhost:4000")) .andReturn().getResponse().getHeader("Authorization"); - - getClient(token).perform(get("/api/authn/status")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.authenticated", is(true))) - .andExpect(jsonPath("$.authenticationMethod", is("shibboleth"))); + checkUserIsSignedIn(token); getClient(token).perform( get("/api/authz/authorizations/search/object") @@ -463,28 +478,10 @@ public void testISOShibHeaders() throws Exception { .andExpect(redirectedUrl("http://localhost:4000")) .andReturn().getResponse().getHeader("Authorization"); - - getClient(token).perform(get("/api/authn/status")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.authenticated", is(true))) - .andExpect(jsonPath("$.authenticationMethod", is("shibboleth"))); - + 
checkUserIsSignedIn(token); // Check if was created a user with such email and netid. - EPerson ePerson = ePersonService.findByNetid(context, Util.formatNetId(testNetId, testIdp)); - assertTrue(Objects.nonNull(ePerson)); - assertEquals(ePerson.getEmail(), testMail); - assertEquals(ePerson.getFirstName(), "knihovna Kůň test Žluťoučký"); - - EPersonBuilder.deleteEPerson(ePerson.getID()); - - getClient(token).perform( - get("/api/authz/authorizations/search/object") - .param("embed", "feature") - .param("feature", feature) - .param("uri", utils.linkToSingleResource(ePersonRest, "self").getHref())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.page.totalElements", is(0))) - .andExpect(jsonPath("$._embedded").doesNotExist()); + EPerson ePerson = checkUserWasCreated(testNetId, testIdp, testMail, KNIHOVNA_KUN_TEST_ZLUTOUCKY); + deleteShibbolethUser(ePerson); } @Test @@ -500,25 +497,143 @@ public void testUTF8ShibHeaders() throws Exception { .header("SHIB-MAIL", testMail) .header("Shib-Identity-Provider", testIdp) .header("SHIB-NETID", testNetId) - .header("SHIB-GIVENNAME", "knihovna Kůň test Žluťoučký")) + .header("SHIB-GIVENNAME", KNIHOVNA_KUN_TEST_ZLUTOUCKY)) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("http://localhost:4000")) + .andReturn().getResponse().getHeader("Authorization"); + + checkUserIsSignedIn(token); + // Check if was created a user with such email and netid. 
+ EPerson ePerson = checkUserWasCreated(testNetId, testIdp, testMail, KNIHOVNA_KUN_TEST_ZLUTOUCKY); + deleteShibbolethUser(ePerson); + } + + @Test + public void testRedirectToMissingHeadersWithRedirectUrlParam() throws Exception { + String expectedMissingHeadersUrl = configurationService.getProperty("dspace.ui.url") + "/login/missing-headers"; + + getClient().perform(get("/api/authn/shibboleth") + .param("redirectUrl", "http://localhost:8080/server/api/authn/status") + .header("SHIB-MAIL", clarinEperson.getEmail()) + .header("SHIB-NETID", NET_ID_TEST_EPERSON)) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl(expectedMissingHeadersUrl)); + } + + // eppn is set + @Test + public void testSuccessFullLoginEppnNetId() throws Exception { + String token = getClient().perform(get("/api/authn/shibboleth") + .header("Shib-Identity-Provider", IDP_TEST_EPERSON) + .header("SHIB-MAIL", clarinEperson.getEmail()) + .header(NET_ID_EPPN_HEADER, NET_ID_TEST_EPERSON)) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("http://localhost:4000")) + .andReturn().getResponse().getHeader("Authorization"); + + checkUserIsSignedIn(token); + + EPerson ePerson = checkUserWasCreated(NET_ID_TEST_EPERSON, IDP_TEST_EPERSON, clarinEperson.getEmail(), null); + deleteShibbolethUser(ePerson); + } + + // persistent-id is set + @Test + public void testSuccessFullLoginPersistentIdNetId() throws Exception { + String token = getClient().perform(get("/api/authn/shibboleth") + .header("Shib-Identity-Provider", IDP_TEST_EPERSON) + .header("SHIB-MAIL", clarinEperson.getEmail()) + .header(NET_ID_PERSISTENT_ID, NET_ID_TEST_EPERSON)) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("http://localhost:4000")) + .andReturn().getResponse().getHeader("Authorization"); + + checkUserIsSignedIn(token); + EPerson ePerson = checkUserWasCreated(NET_ID_TEST_EPERSON, IDP_TEST_EPERSON, clarinEperson.getEmail(), null); + deleteShibbolethUser(ePerson); + } + + @Test + 
public void testSuccessFullLoginWithTwoEmails() throws Exception { + String firstEmail = "efg@test.edu"; + String secondEmail = "abc@test.edu"; + String token = getClient().perform(get("/api/authn/shibboleth") + .header("Shib-Identity-Provider", IDP_TEST_EPERSON) + .header("SHIB-MAIL", firstEmail + ";" + secondEmail)) .andExpect(status().is3xxRedirection()) .andExpect(redirectedUrl("http://localhost:4000")) .andReturn().getResponse().getHeader("Authorization"); + checkUserIsSignedIn(token); + // Find the user by the second email + EPerson ePerson = checkUserWasCreated(null, IDP_TEST_EPERSON, secondEmail, null); + deleteShibbolethUser(ePerson); + } + // The user has changed the email. But that email is already used by another user. + @Test + public void testDuplicateEmailError() throws Exception { + String userWithEppnEmail = "user@eppn.sk"; + String customEppn = "custom eppn"; + + // Create a user with netid and email + String tokenEppnUser = getClient().perform(get("/api/authn/shibboleth") + .header("Shib-Identity-Provider", IDP_TEST_EPERSON) + .header(NET_ID_PERSISTENT_ID, customEppn) + .header("SHIB-MAIL", userWithEppnEmail)) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("http://localhost:4000")) + .andReturn().getResponse().getHeader("Authorization"); + + checkUserIsSignedIn(tokenEppnUser); + + // Try to update an email of existing user - the email is already used by another user - the user should be + // redirected to the login page + getClient().perform(get("/api/authn/shibboleth") + .header("Shib-Identity-Provider", IDP_TEST_EPERSON) + .header(NET_ID_PERSISTENT_ID, NET_ID_TEST_EPERSON) + .header("SHIB-MAIL", userWithEppnEmail)) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("http://localhost:4000/login/duplicate-user?email=" + userWithEppnEmail)); + + // Check if was created a user with such email and netid. 
+ EPerson ePerson = checkUserWasCreated(customEppn, IDP_TEST_EPERSON, userWithEppnEmail, null); + // Delete created eperson - clean after the test + deleteShibbolethUser(ePerson); + } + + private EPerson checkUserWasCreated(String netIdValue, String idpValue, String email, String name) + throws SQLException { + // Check if was created a user with such email and netid. + EPerson ePerson = null; + if (netIdValue != null) { + ePerson = ePersonService.findByNetid(context, Util.formatNetId(netIdValue, idpValue)); + } else { + ePerson = ePersonService.findByEmail(context, email); + } + assertTrue(Objects.nonNull(ePerson)); + if (email != null) { + assertEquals(ePerson.getEmail(), email); + } + + if (name != null) { + assertEquals(ePerson.getFirstName(), name); + } + return ePerson; + } + + private void checkUserIsSignedIn(String token) throws Exception { getClient(token).perform(get("/api/authn/status")) .andExpect(status().isOk()) .andExpect(jsonPath("$.authenticated", is(true))) .andExpect(jsonPath("$.authenticationMethod", is("shibboleth"))); + } - // Check if was created a user with such email and netid. 
- EPerson ePerson = ePersonService.findByNetid(context, Util.formatNetId(testNetId, testIdp)); - assertTrue(Objects.nonNull(ePerson)); - assertEquals(ePerson.getEmail(), testMail); - assertEquals(ePerson.getFirstName(), "knihovna Kůň test Žluťoučký"); + private void deleteShibbolethUser(EPerson ePerson) throws Exception { EPersonBuilder.deleteEPerson(ePerson.getID()); + // Check it was correctly deleted getClient(token).perform( get("/api/authz/authorizations/search/object") .param("embed", "feature") @@ -528,16 +643,4 @@ public void testUTF8ShibHeaders() throws Exception { .andExpect(jsonPath("$.page.totalElements", is(0))) .andExpect(jsonPath("$._embedded").doesNotExist()); } - - @Test - public void testRedirectToMissingHeadersWithRedirectUrlParam() throws Exception { - String expectedMissingHeadersUrl = configurationService.getProperty("dspace.ui.url") + "/login/missing-headers"; - - getClient().perform(get("/api/authn/shibboleth") - .param("redirectUrl", "http://localhost:8080/server/api/authn/status") - .header("SHIB-MAIL", clarinEperson.getEmail()) - .header("SHIB-NETID", NET_ID_TEST_EPERSON)) - .andExpect(status().is3xxRedirection()) - .andExpect(redirectedUrl(expectedMissingHeadersUrl)); - } } diff --git a/dspace/config/modules/authentication-shibboleth.cfg b/dspace/config/modules/authentication-shibboleth.cfg index 7e176f71b718..62815a46d828 100644 --- a/dspace/config/modules/authentication-shibboleth.cfg +++ b/dspace/config/modules/authentication-shibboleth.cfg @@ -90,7 +90,7 @@ authentication-shibboleth.lazysession.secure = true # Authentication headers for Mail, NetID, and Tomcat's Remote User. # Supply all parameters possible. 
-authentication-shibboleth.netid-header = eppn +authentication-shibboleth.netid-header = eppn,persistent-id authentication-shibboleth.email-header = mail authentication-shibboleth.email-use-tomcat-remote-user = false From 999164cccb20eec48831f91a18c432052638a9bb Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Wed, 16 Oct 2024 10:25:01 +0200 Subject: [PATCH 08/45] Propagate the verification token to the DB after the email is successfully sent. (#786) --- .../app/rest/ClarinAutoRegistrationController.java | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java index de2dccb9e866..af6c01714fa7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java @@ -95,12 +95,8 @@ public ResponseEntity sendEmail(HttpServletRequest request, HttpServletResponse log.error("Cannot load the `dspace.ui.url` property from the cfg."); throw new RuntimeException("Cannot load the `dspace.ui.url` property from the cfg."); } - // Generate token and create ClarinVerificationToken record with the token and user email. + // Generate token String verificationToken = Utils.generateHexKey(); - clarinVerificationToken.setEmail(email); - clarinVerificationToken.setToken(verificationToken); - clarinVerificationTokenService.update(context, clarinVerificationToken); - context.commit(); // Compose the url with the verification token. The user will be redirected to the UI. 
String autoregistrationURL = uiUrl + "/login/autoregistration?verification-token=" + verificationToken; @@ -121,6 +117,13 @@ public ResponseEntity sendEmail(HttpServletRequest request, HttpServletResponse return null; } + // Add ClarinVerificationToken record with the token and user email to the database only if the + // email was successfully send. + clarinVerificationToken.setEmail(email); + clarinVerificationToken.setToken(verificationToken); + clarinVerificationTokenService.update(context, clarinVerificationToken); + context.commit(); + return ResponseEntity.ok().build(); } From 6c6de8722b7d8fedef8a08154c887831b9ea5a0a Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Thu, 17 Oct 2024 16:21:38 +0200 Subject: [PATCH 09/45] UFAL/Enhanced type-bind feature (#762) * type bind is correctly rendered in the FE, but BE is still not working * Synchronized the `submission-forms_cs.xml` * Added doc into `submission-forms` about enhanced type-bind `field` * Updated `local.cfg` for tests - added type-bind property * Updated docs for the customized type-bind configuration property. * Updated MetadataValidation following the type-bind customization. * Added isAllowed function for multiple type-bind definitions * Added some docs for the new method * The values of the input wasn't loaded. * Allowed fields could be empty when they should have values. * Used isEmpty function and created constant for the `=>`. 
--- .../java/org/dspace/app/util/DCInput.java | 36 +- .../org/dspace/app/util/DCInputsReader.java | 1 + .../test/data/dspaceFolder/config/local.cfg | 6 +- .../app/rest/submit/step/DescribeStep.java | 33 +- .../step/validation/MetadataValidation.java | 62 +- .../ClarinWorkflowItemRestRepositoryIT.java | 45 + dspace/config/dspace.cfg | 3 +- dspace/config/submission-forms.dtd | 3 + dspace/config/submission-forms.xml | 7 +- dspace/config/submission-forms_cs.xml | 1962 +++++++++++++---- 10 files changed, 1680 insertions(+), 478 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java index 9fd306d1aaf4..f9d43a9abc98 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java @@ -25,6 +25,7 @@ import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; import org.xml.sax.SAXException; @@ -249,12 +250,11 @@ public DCInput(Map fieldMap, Map> listMap, // parsing of the element (using the colon as split separator) typeBind = new ArrayList(); String typeBindDef = fieldMap.get("type-bind"); - if (typeBindDef != null && typeBindDef.trim().length() > 0) { - String[] types = typeBindDef.split(","); - for (String type : types) { - typeBind.add(type.trim()); - } - } + this.insertToTypeBind(typeBindDef); + String typeBindField = fieldMap.get(DCInputsReader.TYPE_BIND_FIELD_ATTRIBUTE); + this.insertToTypeBind(typeBindField); + + style = fieldMap.get("style"); isRelationshipField = fieldMap.containsKey("relationship-type"); isMetadataField = fieldMap.containsKey("dc-schema"); @@ -273,6 +273,15 @@ public DCInput(Map fieldMap, Map> listMap, } + private void insertToTypeBind(String typeBindDef) { + if (StringUtils.isNotEmpty(typeBindDef)) { + String[] types = typeBindDef.split(","); + for (String type : types) { 
+ typeBind.add(type.trim()); + } + } + } + protected void initRegex(String regex) { this.regex = null; this.pattern = null; @@ -559,6 +568,21 @@ public boolean isAllowedFor(String typeName) { return typeBind.contains(typeName); } + /** + * Decides if this field is valid for the document type + * Check if one of the typeName is in the typeBind list + * + * @param typeNames List of document type names e.g. ["VIDEO"] + * @return true when there is no type restriction or typeName is allowed + */ + public boolean isAllowedFor(List typeNames) { + if (typeBind.isEmpty()) { + return true; + } + + return CollectionUtils.containsAny(typeBind, typeNames); + } + public String getScope() { return visibility; } diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java index d013a7d3fe7b..f79de9bd8df7 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java @@ -74,6 +74,7 @@ public class DCInputsReader { * Keyname for storing the name of the complex input type */ static final String COMPLEX_DEFINITION_REF = "complex-definition-ref"; + public static final String TYPE_BIND_FIELD_ATTRIBUTE = "field"; /** diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 1027d5bd93b7..51d050c296a2 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -306,4 +306,8 @@ sync.storage.service.enabled = false signposting.enabled = true # Test configuration has only EN locale (submission-forms.xml) -webui.supported.locales = en \ No newline at end of file +webui.supported.locales = en + +# Type bind configuration for the submission form with special type-bind field +# When title is something like "Type-bind test" the type-bind field will popped up +submit.type-bind.field = 
dc.type,dc.identifier.citation=>dc.title \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java index 8b0054e71775..682983acc2ab 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java @@ -63,6 +63,8 @@ */ public class DescribeStep extends AbstractProcessingStep { + public static final String KEY_VALUE_SEPARATOR = "=>"; + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DescribeStep.class); // Input reader for form configuration @@ -93,15 +95,34 @@ public DataDescribe getData(SubmissionService submissionService, InProgressSubmi private void readField(InProgressSubmission obj, SubmissionStepConfig config, DataDescribe data, DCInputSet inputConfig) throws DCInputsReaderException { - String documentTypeValue = ""; - List documentType = itemService.getMetadataByMetadataString(obj.getItem(), - configurationService.getProperty("submit.type-bind.field", "dc.type")); - if (documentType.size() > 0) { - documentTypeValue = documentType.get(0).getValue(); + List documentTypeValueList = new ArrayList<>(); + List typeBindFields = Arrays.asList( + configurationService.getArrayProperty("submit.type-bind.field", new String[0])); + + for (String typeBindField : typeBindFields) { + String typeBFKey = typeBindField; + if (typeBindField.contains(KEY_VALUE_SEPARATOR)) { + String[] parts = typeBindField.split(KEY_VALUE_SEPARATOR); + // Get the second part of the split - the metadata field + typeBFKey = parts[1]; + } + List documentType = itemService.getMetadataByMetadataString(obj.getItem(), typeBFKey); + if (documentType.size() > 0) { + documentTypeValueList.add(documentType.get(0).getValue()); + } } // Get list of all field names (including qualdrop names) allowed for 
this dc.type - List allowedFieldNames = inputConfig.populateAllowedFieldNames(documentTypeValue); + List allowedFieldNames = new ArrayList<>(); + + if (CollectionUtils.isEmpty(documentTypeValueList)) { + // If no dc.type is set, we allow all fields + allowedFieldNames.addAll(inputConfig.populateAllowedFieldNames(null)); + } else { + documentTypeValueList.forEach(documentTypeValue -> { + allowedFieldNames.addAll(inputConfig.populateAllowedFieldNames(documentTypeValue)); + }); + } // Loop input rows and process submitted metadata for (DCInput[] row : inputConfig.getFields()) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java index 1ff9639906ac..fe46ee0ba9b3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest.submit.step.validation; +import static org.dspace.app.rest.submit.step.DescribeStep.KEY_VALUE_SEPARATOR; + import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; @@ -68,22 +70,46 @@ public List validate(SubmissionService submissionService, InProgressS SubmissionStepConfig config) throws DCInputsReaderException, SQLException { List errors = new ArrayList<>(); - String documentTypeValue = ""; DCInputSet inputConfig = getInputReader().getInputsByFormName(config.getId()); - List documentType = itemService.getMetadataByMetadataString(obj.getItem(), - configurationService.getProperty("submit.type-bind.field", "dc.type")); - if (documentType.size() > 0) { - documentTypeValue = documentType.get(0).getValue(); + List documentTypeValueList = new ArrayList<>(); + // Get the list of type-bind fields. 
It could be in the form of schema.element.qualifier=>metadata_field, or + // just metadata_field + List typeBindFields = Arrays.asList( + configurationService.getArrayProperty("submit.type-bind.field", new String[0])); + + for (String typeBindField : typeBindFields) { + String typeBFKey = typeBindField; + // If the type-bind field is in the form of schema.element.qualifier=>metadata_field, split it and get the + // metadata field + if (typeBindField.contains(KEY_VALUE_SEPARATOR)) { + String[] parts = typeBindField.split(KEY_VALUE_SEPARATOR); + // Get the second part of the split - the metadata field + typeBFKey = parts[1]; + } + // Get the metadata value for the type-bind field + List documentType = itemService.getMetadataByMetadataString(obj.getItem(), typeBFKey); + if (documentType.size() > 0) { + documentTypeValueList.add(documentType.get(0).getValue()); + } } - // Get list of all field names (including qualdrop names) allowed for this dc.type - List allowedFieldNames = inputConfig.populateAllowedFieldNames(documentTypeValue); + // Get list of all field names (including qualdrop names) allowed for this dc.type, or specific type-bind field + List allowedFieldNames = new ArrayList<>(); + + if (CollectionUtils.isEmpty(documentTypeValueList)) { + // If no dc.type is set, we allow all fields + allowedFieldNames.addAll(inputConfig.populateAllowedFieldNames(null)); + } else { + documentTypeValueList.forEach(documentTypeValue -> { + allowedFieldNames.addAll(inputConfig.populateAllowedFieldNames(documentTypeValue)); + }); + } - // Begin the actual validation loop for (DCInput[] row : inputConfig.getFields()) { for (DCInput input : row) { String fieldKey = - metadataAuthorityService.makeFieldKey(input.getSchema(), input.getElement(), input.getQualifier()); + metadataAuthorityService.makeFieldKey(input.getSchema(), input.getElement(), + input.getQualifier()); boolean isAuthorityControlled = metadataAuthorityService.isAuthorityControlled(fieldKey); List fieldsName = new 
ArrayList(); @@ -99,10 +125,10 @@ public List validate(SubmissionService submissionService, InProgressS // Check the lookup list. If no other inputs of the same field name allow this type, // then remove. This includes field name without qualifier. - if (!input.isAllowedFor(documentTypeValue) && (!allowedFieldNames.contains(fullFieldname) + if (!input.isAllowedFor(documentTypeValueList) && (!allowedFieldNames.contains(fullFieldname) && !allowedFieldNames.contains(input.getFieldName()))) { itemService.removeMetadataValues(ContextUtil.obtainCurrentRequestContext(), - obj.getItem(), mdv); + obj.getItem(), mdv); } else { validateMetadataValues(mdv, input, config, isAuthorityControlled, fieldKey, errors); if (mdv.size() > 0 && input.isVisible(DCInput.SUBMISSION_SCOPE)) { @@ -126,7 +152,7 @@ public List validate(SubmissionService submissionService, InProgressS for (String fieldName : fieldsName) { boolean valuesRemoved = false; List mdv = itemService.getMetadataByMetadataString(obj.getItem(), fieldName); - if (!input.isAllowedFor(documentTypeValue)) { + if (!input.isAllowedFor(documentTypeValueList)) { // Check the lookup list. If no other inputs of the same field name allow this type, // then remove. 
Otherwise, do not if (!(allowedFieldNames.contains(fieldName))) { @@ -134,26 +160,26 @@ public List validate(SubmissionService submissionService, InProgressS obj.getItem(), mdv); valuesRemoved = true; log.debug("Stripping metadata values for " + input.getFieldName() + " on type " - + documentTypeValue + " as it is allowed by another input of the same field " + + + documentTypeValueList + " as it is allowed by another input of the same field " + "name"); } else { log.debug("Not removing unallowed metadata values for " + input.getFieldName() + " on type " - + documentTypeValue + " as it is allowed by another input of the same field " + + + documentTypeValueList + " as it is allowed by another input of the same field " + "name"); } } validateMetadataValues(mdv, input, config, isAuthorityControlled, fieldKey, errors); if (((input.isRequired() && mdv.size() == 0) && input.isVisible(DCInput.SUBMISSION_SCOPE) - && !valuesRemoved) + && !valuesRemoved) || !isValidComplexDefinitionMetadata(input, mdv)) { // Is the input required for *this* type? 
In other words, are we looking at a required // input that is also allowed for this document type - if (input.isAllowedFor(documentTypeValue)) { + if (input.isAllowedFor(documentTypeValueList)) { // since this field is missing add to list of error // fields addError(errors, ERROR_VALIDATION_REQUIRED, "/" + WorkspaceItemRestRepository.OPERATION_PATH_SECTIONS + "/" + config.getId() + "/" + - input.getFieldName()); + input.getFieldName()); } } if (LOCAL_METADATA_HAS_CMDI.equals(fieldName)) { @@ -167,7 +193,9 @@ public List validate(SubmissionService submissionService, InProgressS } } } + } + return errors; } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinWorkflowItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinWorkflowItemRestRepositoryIT.java index 20ec8dc38f24..5815533b44c5 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinWorkflowItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinWorkflowItemRestRepositoryIT.java @@ -320,4 +320,49 @@ public void shouldCreateProvenanceMessageOnItemSubmit() throws Exception { } assertThat(containsSubmitterProvenance, is(true)); } + + // When some input field has ... in the submission-forms.xml + @Test + public void shouldCreateItemWithCustomTypeBindField() throws Exception { + context.turnOffAuthorisationSystem(); + String CITATION_VALUE = "Some citation"; + + //** GIVEN ** + //1. A community with one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + // Submitter group - allow deposit a new item without workflow + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + + //3. 
a workspace item + WorkspaceItem wsitem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) + .withTitle("Type-bind test") + .withIssueDate("2017-10-17") + .grantLicense() + .withMetadata("dc", "identifier", "citation", CITATION_VALUE) + .build(); + + context.restoreAuthSystemState(); + + // get the submitter auth token + String authToken = getAuthToken(admin.getEmail(), password); + + // submit the workspaceitem to start the workflow + getClient(authToken) + .perform(post(BASE_REST_SERVER_URL + "/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + wsitem.getID()) + .contentType(textUriContentType)) + .andExpect(status().isCreated()); + + // Load deposited item and check the provenance metadata + Item depositedItem = itemService.find(context, wsitem.getItem().getID()); + List mvList = itemService.getMetadata(depositedItem, "dc", "identifier", + "citation", Item.ANY); + assertFalse(mvList.isEmpty()); + assertThat(mvList.get(0).getValue(), is(CITATION_VALUE)); + } } diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index c07753e4a9e6..759983b45535 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -1553,7 +1553,8 @@ request.item.reject.email = true #------------------SUBMISSION CONFIGURATION------------------------# #------------------------------------------------------------------# # Field to use for type binding, default dc.type -submit.type-bind.field = dc.type +# It could be in the form of schema.element.qualifier=>metadata_field, or just metadata_field +submit.type-bind.field = dc.type,dc.language.iso=>edm.type #---------------------------------------------------------------# #----------SOLR DATABASE RESYNC SCRIPT CONFIGURATION------------# diff --git a/dspace/config/submission-forms.dtd b/dspace/config/submission-forms.dtd index c44e52fa07a2..932494dc7728 100644 --- a/dspace/config/submission-forms.dtd +++ b/dspace/config/submission-forms.dtd @@ -20,6 +20,9 @@ + + diff --git 
a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index ad6044e3054d..1a63054ec9d2 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -1776,6 +1776,9 @@ Please give us a description + dc @@ -1783,7 +1786,7 @@ iso true - TEXT + TEXT autocomplete Select the language of the main content of the item. Multiple languages are possible. Start typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). @@ -1798,7 +1801,7 @@ iso true - VIDEO,IMAGE,SOUND,3D + VIDEO,IMAGE,SOUND,3D autocomplete Optionally, select the language of the main content of the item. Multiple languages are possible. Start diff --git a/dspace/config/submission-forms_cs.xml b/dspace/config/submission-forms_cs.xml index c3250c3a357a..bd06c7fdeac4 100644 --- a/dspace/config/submission-forms_cs.xml +++ b/dspace/config/submission-forms_cs.xml @@ -111,13 +111,40 @@ dc - title - alternative + source + uri + false + + onebox + Uveďte URL projektu + + http.* + + + + + local + demo + uri + false + + onebox + URL se vzorky dat, v případě nástrojů předvedení výstupu. + + http.* + + + + + dc + relation + isreferencedby true onebox If the item has any alternative titles, please enter them here. + http.* @@ -207,12 +234,124 @@ iso false + corpus,lexicalConceptualResource,languageDescription dropdown Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'. - + Please choose a language for the resource. + + + + + dc + language + iso + true + + toolService + onebox + If the tool/service is language dependent, select the appropriate language(s). Otherwise leave the field empty. Multiple languages are possible. 
Start typing the language and use autocomplete form that will appear. + + + + + + local + size + info + true + + corpus,languageDescription,lexicalConceptualResource + complex + You can state the extent of the submitted data, eg. the number of tokens. + + + + + metashare + ResourceInfo#ContentInfo + mediaType + false + + corpus,lexicalConceptualResource + dropdown + Media type of the main content of the item e.g., "text" for + textual corpora or "audio" for audio recordings. + + Media type is required + + + + + metashare + ResourceInfo#ContentInfo + mediaType + false + + languageDescription + dropdown + Media type of the main content of the item e.g., "text" for + textual corpora or "audio" for audio recordings. + + Media type is required + + + + + + + metashare + ResourceInfo#ContentInfo + detailedType + false + + toolService + dropdown + + Choose one of the types + + + + + metashare + ResourceInfo#ContentInfo + detailedType + false + + languageDescription + dropdown + + Choose one of the types + + + + + metashare + ResourceInfo#ContentInfo + detailedType + false + + lexicalConceptualResource + dropdown + + Choose one of the types + + + + + metashare + ResourceInfo#ResourceComponentType#ToolServiceInfo + languageDependent + false + + toolService + list + Indicate whether the operation of the tool or service is + language dependent or not + + Please indicate whether the tool is language dependent @@ -254,10 +393,10 @@ onebox - The actual maximum upload size of the file is 4GB. To upload the file bigger than maximum - upload size type the URL of that big file. Admin must know URL to that bitstream file. - Then click on the 'Save' button and the file will start to upload. The file will be loaded - from the '/temp' folder of the server. Example: /tomcat/temp/bitstream.png + The actual maximum upload size of the file is 4GB. To upload a file bigger than the + maximum upload size, enter the URL of that large file. The admin must know the URL + of that bitstream file. 
Then, click on the 'Save' button, and the file will start + to upload. The file path must be an absolute path. @@ -279,7 +418,11 @@ true tag - Enter appropriate subject keywords or phrases. + Enter appropriate subject keyword or phrase and press the Add button. You can repeat it for + multiple keywords or use separators i.e., Enter and comma, which will split it accordingly. + Start typing the keyword and use autocomplete form that will appear. End your input by pressing + ESC if you don't want to use the preselected value. + Please enter at least one subject related to your submission srsc @@ -1432,44 +1575,104 @@ -
+ - local - submission - note + edm + type + false - - textarea - Zde můžete zanechat vzkaz editorům. - + + dropdown + Choose one of TEXT, VIDEO, SOUND, IMAGE, 3D. If choosing + TEXT consider adding the resource among other Language Resources. Images are visual resources for users to + look at. Text materials are meant to be read and not looked at. + Please select one of the options. - - dc - relation - replaces - true - + title + + false + onebox - URL příbuzného záznamu, který je tímto záznamem nahrazen. Pokud je příbuzný záznam v tomto repozitáři, začněte psát jeho název, nebo handle a vyberte záznam z nabídky. + Enter the main title of the item in English. + You must enter a main title for this item. + + + + + local + demo + uri + false + + onebox + A url with samples of the resource or, in the case of tools, + of samples of the output. + + http.* - - dc relation - isreplacedby + isreferencedby true - + onebox - Příbuzný záznam, který nahrazuje tento. + Link to original paper that references this dataset. + + http.* + + + + + dc + date + issued + false + + date + Please give the date when the submission data were issued if any e.g., 2014-01-21 or at least + the year. + + You must enter the date or at least the year in a valid format. + + + + + local + hidden + + false + + list + Indicate whether you want to hide this item from browse and search. Combine with "Upload cmdi" + for weblicht submissions. + + + + policy=deny,action=read,grantee-type=user,grantee-id=* + + + + + + local + hasMetadata + + false + + list + + Indicate whether you will upload cmdi file in the next step. Combine with "hide" for weblicht + submissions. + policy=deny,action=read,grantee-type=user,grantee-id=* @@ -1477,675 +1680,1526 @@
- - - - - - - - - - - - - - - - - - - - - Yes - true - - - - - ISSN - issn - - - Other - other - - - ISMN - ismn - - - Gov't Doc # - govdoc - - - URI - uri - - - ISBN - isbn - - - - - - - N/A - N/A - - - EU - euFunds - - - Own funds - ownFunds - - - National - nationalFunds - - - other - Other - - - - - - Animation - Animation - - - Article - Article - - - Book - Book - - - Book chapter - Book chapter - - - Dataset - Dataset - - - Learning Object - Learning Object - - - Image - Image - - - Image, 3-D - Image, 3-D - - - Map - Map - - - Musical Score - Musical Score - - - Plan or blueprint - Plan or blueprint - - - Preprint - Preprint - - - Presentation - Presentation - - - Recording, acoustical - Recording, acoustical - - - Recording, musical - Recording, musical - - - Recording, oral - Recording, oral - - +
+ + + dc + contributor + author + true + + clarin-name + Enter the names of the authors of this item. Start typing the author's last name and use + autocomplete form that will appear if applicable. End your input by pressing ESC if you don't + want to use the preselected value. + + Please add author(s) + + + + + dc + publisher + + true + + autocomplete + The name of the publisher of the original analog or born + digital object. Use your home institution if this is a born digital object being published now. Start typing the + publisher and use autocomplete form that will appear if applicable. End your input by pressing ESC if you + don't want to use the preselected value. + + You must enter the name of the publisher. + + + + + local + dataProvider + + false + + autocomplete + This concerns the digital object (not the analog + original). An institution from which the data come. Used eg. to give proper attribution. Generally + different from publisher. + + + + + + local + contact + person + true + + complex + Person to contact in case of any issues with this submission. + Please fill all the fields for the contact person. + + + + + local + sponsor + true + + complex + Acknowledge sponsors and funding that supported work described by this submission. + + +
+
+ + + dc + type + + false + + autocomplete + The type should be different from what you have + entered in the first step. Examples: photo or painting for IMAGE, book or letter for TEXT, etc. + Type is required + + + + + + dc + description + + false + + aaa + textarea + Enter a description of the submitted data. + Please give us a description + + + + + dc + language + iso + true + + TEXT + autocomplete + Select the language of the main content of the item. Multiple languages are possible. Start + typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). + + The language is required for TEXT resources + + + + + dc + language + iso + true + + VIDEO,IMAGE,SOUND,3D + autocomplete + Optionally, select the language of the main content + of the item. Multiple languages are possible. Start + typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). + + + + + + + dc + subject + + + true + + tag + Enter appropriate subject keyword or phrase and press + the Add button. Use keywords to specify also people, places and times (period, era, date range etc) the resource + is about. You can use hierarchical subjects, separate the hierarchy levels with two colons (::). Eg. + People::John Doe, Places::New York, Times::WWII. + You can repeat it for multiple keywords or use separators i.e., comma and semicolon, which will split it accordingly. + Start typing the keyword and use autocomplete form that will appear. End your input by pressing + ESC if you don't want to use the preselected value. + + Please enter at least one subject related to your submission + + + + + dc + identifier + other + true + + + onebox + The item will get a handle. 
If the item has any + identification numbers or codes associated with it, please enter the types and the actual numbers or codes. + + + + + + + local + size + info + true + + complex + You can state the extent of the submitted data, eg. the number of tokens. + + +
+ +
+ + + + + dc + title + + false + + onebox + Enter the main title of the item in English. + You must enter a main title for this item. + + + + + local + demo + uri + false + + onebox + Course homepage + + http.* + + + + + dc + relation + isreferencedby + true + + onebox + Link to original paper that references this dataset. + + http.* + + + + + dc + date + issued + false + + date + Please give the date when the submission data were issued if any e.g., 2014-01-21 or at least + the year. + + You must enter the date or at least the year in a valid format. + + + + + local + hidden + + false + + list + Indicate whether you want to hide this item from browse and search. Combine with "Upload cmdi" + for weblicht submissions. + + + + policy=deny,action=read,grantee-type=user,grantee-id=* + + + + + + local + hasMetadata + + false + + list + + Indicate whether you will upload cmdi file in the next step. Combine with "hide" for weblicht + submissions. + + + + policy=deny,action=read,grantee-type=user,grantee-id=* + + + +
+ +
+ + + + dc + contributor + author + true + + clarin-name + Enter the names of the authors of this item. Start typing the author's last name and use + autocomplete form that will appear if applicable. End your input by pressing ESC if you don't + want to use the preselected value. + + Please add author(s) + + + + + dc + publisher + + true + + autocomplete + The name of the publisher of the original analog or born + digital object. Use your home institution if this is a born digital object being published now. Start typing the + publisher and use autocomplete form that will appear if applicable. End your input by pressing ESC if you + don't want to use the preselected value. + + You must enter the name of the publisher. + + + + + local + contact + person + true + + complex + Person to contact in case of any issues with this submission. + Please fill all the fields for the contact person. + + + + + local + sponsor + true + + complex + Acknowledge sponsors and funding that supported work described by this submission. + + +
+ +
+ + + dc + type + + false + + + dropdown + teachingMaterials + This is here to autofill a value. The value should not be changed. + Please select a resource type for your submission. + + + + + dc + description + + false + + textarea + Enter a description of the submitted data. + Please give us a description + + + + + dc + language + iso + true + + autocomplete + Select the language of the main content of the item. Multiple languages are possible. Start + typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). + + Please choose a language for the resource. + + + + + dc + subject + + + true + + autocomplete + Enter appropriate subject keyword or phrase and press the Add button. You can repeat it for + multiple keywords or use separators i.e., comma and semicolon, which will split it accordingly. + Start typing the keyword and use autocomplete form that will appear. End your input by pressing + ESC if you don't want to use the preselected value. + + Please enter at least one subject related to your submission + + + + + dc + identifier + other + true + + + onebox + The item will get a handle. If the item has any + identification numbers or codes associated with it, please enter the types and the actual numbers or codes. + + + + +
+ +
+ + + local + submission + note + false + + textarea + Zde můžete zanechat vzkaz editorům. + + + + + + + + dc + relation + replaces + true + + onebox + URL příbuzného záznamu, který je tímto záznamem nahrazen. Pokud je příbuzný záznam v tomto repozitáři, začněte psát jeho název, nebo handle a vyberte záznam z nabídky. + + + + + + + + dc + relation + isreplacedby + true + + onebox + Příbuzný záznam, který nahrazuje tento. + + + policy=deny,action=read,grantee-type=user,grantee-id=* + + + +
+ + + + + + + + + + + + + + + + + + + + + Yes + true + + + + + ISSN + issn + + + Other + other + + + ISMN + ismn + + + Gov't Doc # + govdoc + + + URI + uri + + + ISBN + isbn + + + + + + + N/A + N/A + + + EU + euFunds + + + Own funds + ownFunds + + + National + nationalFunds + + + other + Other + + + + + + N/A + + + + Corpus + corpus + + + Lexical conceptual + lexicalConceptualResource + + + Language description + languageDescription + + + Technology / Tool / Service + toolService + + + + + + + + + N/A + + + + English (United States) + en_US + + + English + en + + + Spanish + es + + + German + de + + + French + fr + + + Italian + it + + + Japanese + ja + + + Chinese + zh + + + Portuguese + pt + + + Turkish + tr + + + (Other) + other + + + + + + + + + + + Without License + + + + Attribution (CC-BY) + http://creativecommons.org/licenses/by/4.0/ + + + Attribution, No Derivative Works (CC-BY-ND) + http://creativecommons.org/licenses/by-nd/4.0/ + + + Attribution, Share-alike (CC-BY-SA) + http://creativecommons.org/licenses/by-sa/4.0/ + + + Attribution, Non-commercial (CC-BY-NC) + http://creativecommons.org/licenses/by-nc/4.0/ + + + Attribution, Non-commercial, No Derivative Works (CC-BY-NC-ND) + http://creativecommons.org/licenses/by-nc-nd/4.0/ + + + Attribution, Non-commercial, Share-alike (CC-BY-NC-SA) + http://creativecommons.org/licenses/by-nc-sa/4.0/ + + + + Other + other + + + + + + + + Interactive Resource + interactive resource + + + - Website + website + + + Dataset + dataset + + + - Interview + interview + + + Image + image + + + - Moving Image + moving image + + + -- Video + video + + + - Still Image + still image + + + Other + other + + Software - Software + software + + + - Research Software + research software + + + Workflow + workflow + + + Cartographic Material + cartographic material + + + - Map + map + + + Sound + sound + + + - Musical Composition + musical composition + + + Text + text + + + - Annotation + annotation + + + - Bibliography + bibliography 
+ + + - Book + book + + + -- Book Part + book part + + + - Conference Object + conference object + + + -- Conference Proceedings + conference proceedings + + + --- Conference Paper + conference paper + + + --- Conference Poster + conference poster + + + -- Conference Paper Not In Proceedings + conference paper not in proceedings + + + -- Conference Poster Not In Proceedings + conference poster not in proceedings + + + - Lecture + lecture + + + - Letter + letter + + + - Periodical + periodical + + + -- Journal + journal + + + --- Contribution to Journal + contribution to journal + + + ---- Journal Article + journal article + + + ----- Data Paper + data paper + + + ----- Review Article + review article + + + ----- Research Article + research article + + + ----- Corrigendum + corrigendum + + + ----- Software Paper + software paper + + + ---- Editorial + editorial + + + ---- Letter to the Editor + letter to the editor + + + -- Newspaper + newspaper - Technical Report - Technical Report + --- Newspaper Article + newspaper article - Thesis - Thesis + -- Magazine + magazine - Video - Video + - Patent + patent - Working Paper - Working Paper + - Preprint + preprint - Other - Other + - Report + report - - - - - N/A - + -- Report Part + report part - English (United States) - en_US + -- Internal Report + internal report - English - en + -- Memorandum + memorandum - Spanish - es + -- Other Type of Report + other type of report - German - de + -- Policy Report + policy report - French - fr + -- Project Deliverable + project deliverable - Italian - it + --- Data Management Plan + data management plan - Japanese - ja + -- Report to Funding Agency + report to funding agency - Chinese - zh + -- Research Report + research report - Portuguese - pt + -- Technical Report + technical report - Turkish - tr + - Research Proposal + research proposal - (Other) - other + - Review + review + + + -- Book Review + book review + + + - Technical Documentation + technical documentation + + + - 
Working Paper + working paper + + + - Thesis + thesis + + + -- Bachelor Thesis + bachelor thesis + + + -- Doctoral Thesis + doctoral thesis + + + -- Master Thesis + master thesis + + + - Musical Notation + musical notation + + + - Blog Post + blog post + + + - Manuscript + website + + + Learning Object + learning object + + + Clinical Trial + clinical trial + + + Clinical Study + clinical study - - - + + + Author’s Original + http://purl.org/coar/version/c_b1a7d7d4d402bcce + + + Submitted Manuscript Under Review + http://purl.org/coar/version/c_71e4c1898caa6e32 + + + Accepted Manuscript + http://purl.org/coar/version/c_ab4af688f83e57aa + + + Proof + http://purl.org/coar/version/c_fa2ee174bc00049f + + + Version of Record + http://purl.org/coar/version/c_970fb48d4fbd8a85 + + + Corrected Version of Record + http://purl.org/coar/version/c_e19f295774971610 + + + Enhanced Version of Record + http://purl.org/coar/version/c_dc82b40f9837b551 + + + Not Applicable (or Unknown) + http://purl.org/coar/version/c_be7fb7dd8ff6fe43 + + - + + + open access + http://purl.org/coar/access_right/c_abf2 + + + embargoed access + http://purl.org/coar/access_right/c_f1cf + + + restricted access + http://purl.org/coar/access_right/c_16ec + + + metadata only access + http://purl.org/coar/access_right/c_14cb + + + - Without License - + Scopus Author ID + scopus-author-id - Attribution (CC-BY) - http://creativecommons.org/licenses/by/4.0/ + Ciencia ID + ciencia-id - Attribution, No Derivative Works (CC-BY-ND) - http://creativecommons.org/licenses/by-nd/4.0/ + Google Scholar ID + gsid - Attribution, Share-alike (CC-BY-SA) - http://creativecommons.org/licenses/by-sa/4.0/ + Open Researcher and Contributor ID (ORCID) + orcid - Attribution, Non-commercial (CC-BY-NC) - http://creativecommons.org/licenses/by-nc/4.0/ + Web of Science ResearcherID + rid - Attribution, Non-commercial, No Derivative Works (CC-BY-NC-ND) - http://creativecommons.org/licenses/by-nc-nd/4.0/ + ISNI - International Standard 
Name Identifier + isni - Attribution, Non-commercial, Share-alike (CC-BY-NC-SA) - http://creativecommons.org/licenses/by-nc-sa/4.0/ + Other + + + + + + + ISNI - International Standard Name Identifier + isni + + + Ringgold identifier + rin + + + Research Organization Registry + ror - Other - other + + + + + + + N/A + + + + Is a Funding Organization + FundingOrganization + + + + TEXT + TEXT + + + VIDEO + VIDEO + + + SOUND + SOUND + + + IMAGE + IMAGE + + + 3D + 3D + - - + + + + + + + N/A + + - Interactive Resource - interactive resource + Web Executable + webExecutable - - Website - website + Paper copy + paperCopy - Dataset - dataset + HardDisk + hardDisk - - Interview - interview + Blu Ray + bluRay - Image - image + DVD-R + DVD-R - - Moving Image - moving image + CD-ROM + CD-ROM - -- Video - video + Download + downloadable - - Still Image - still image + Accessible Through Interface + accessibleThroughInterface - Other + other other + + + + - Software - software + N/A + - - Research Software - research software + True + True - Workflow - workflow + False + False + + + + - Cartographic Material - cartographic material + N/A + - - Map - map + text + text - Sound - sound + audio + audio - - Musical Composition - musical composition + video + video - Text + image + image + + + + + + N/A + + + + text text - - Annotation - annotation + video + video - - Bibliography - bibliography + image + image + + + + - - Book - book + N/A + - -- Book Part - book part + tool + tool - - Conference Object - conference object + service + service - -- Conference Proceedings - conference proceedings + platform + platform - --- Conference Paper - conference paper + suiteOfTools + suiteOfTools - --- Conference Poster - conference poster + infrastructure + infrastructure - -- Conference Paper Not In Proceedings - conference paper not in proceedings + architecture + architecture - -- Conference Poster Not In Proceedings - conference poster not in proceedings + nlpDevelopmentEnvironment + 
nlpDevelopmentEnvironment - - Lecture - lecture + other + other + + - - Letter - letter + N/A + - - Periodical - periodical + wordList + wordList - -- Journal - journal + computationalLexicon + computationalLexicon - --- Contribution to Journal - contribution to journal + ontology + ontology - ---- Journal Article - journal article + wordnet + wordnet - ----- Data Paper - data paper + thesaurus + thesaurus - ----- Review Article - review article + framenet + framenet - ----- Research Article - research article + terminologicalResource + terminologicalResource - ----- Corrigendum - corrigendum + machineReadableDictionary + machineReadableDictionary - ----- Software Paper - software paper + lexicon + lexicon - ---- Editorial - editorial + other + other + + - ---- Letter to the Editor - letter to the editor + N/A + - -- Newspaper - newspaper + grammar + grammar - --- Newspaper Article - newspaper article + machine learning model + mlmodel - -- Magazine - magazine + n-gram model + ngrammodel - - Patent - patent + other + other + + + + - - Preprint - preprint + N/A + + - - Report - report + terms + terms - -- Report Part - report part + entries + entries - -- Internal Report - internal report + turns + turns - -- Memorandum - memorandum + utterances + utterances - -- Other Type of Report - other type of report + articles + articles - -- Policy Report - policy report + files + files - -- Project Deliverable - project deliverable + items + items - --- Data Management Plan - data management plan + seconds + seconds - -- Report to Funding Agency - report to funding agency + elements + elements - -- Research Report - research report + units + units - -- Technical Report - technical report + minutes + minutes - - Research Proposal - research proposal + hours + hours - - Review - review + texts + texts - -- Book Review - book review + sentences + sentences - - Technical Documentation - technical documentation + pages + pages - - Working Paper - working paper + bytes + bytes - 
- Thesis - thesis + tokens + tokens - -- Bachelor Thesis - bachelor thesis + words + words - -- Doctoral Thesis - doctoral thesis + keywords + keywords - -- Master Thesis - master thesis + idiomaticExpressions + idiomaticExpressions - - Musical Notation - musical notation + neologisms + neologisms - - Blog Post - blog post + multiWordUnits + multiWordUnits - - Manuscript - website + expressions + expressions - Learning Object - learning object + synsets + synsets - Clinical Trial - clinical trial + classes + classes - Clinical Study - clinical study + concepts + concepts - - - - Author’s Original - http://purl.org/coar/version/c_b1a7d7d4d402bcce + lexicalTypes + lexicalTypes - Submitted Manuscript Under Review - http://purl.org/coar/version/c_71e4c1898caa6e32 + phoneticUnits + phoneticUnits - Accepted Manuscript - http://purl.org/coar/version/c_ab4af688f83e57aa + syntacticUnits + syntacticUnits - Proof - http://purl.org/coar/version/c_fa2ee174bc00049f + semanticUnits + semanticUnits - Version of Record - http://purl.org/coar/version/c_970fb48d4fbd8a85 + predicates + predicates - Corrected Version of Record - http://purl.org/coar/version/c_e19f295774971610 + phonemes + phonemes - Enhanced Version of Record - http://purl.org/coar/version/c_dc82b40f9837b551 + diphones + diphones - Not Applicable (or Unknown) - http://purl.org/coar/version/c_be7fb7dd8ff6fe43 + T-HPairs + T-HPairs - - - - open access - http://purl.org/coar/access_right/c_abf2 + syllables + syllables - embargoed access - http://purl.org/coar/access_right/c_f1cf + frames + frames - restricted access - http://purl.org/coar/access_right/c_16ec + images + images - metadata only access - http://purl.org/coar/access_right/c_14cb + kb + kb - - - - Scopus Author ID - scopus-author-id + mb + mb - Ciencia ID - ciencia-id + gb + gb - Google Scholar ID - gsid + rb + rb - Open Researcher and Contributor ID (ORCID) - orcid + shots + shots - Web of Science ResearcherID - rid + unigrams + unigrams - ISNI - International 
Standard Name Identifier - isni + bigrams + bigrams - Other - + trigrams + trigrams - - - - ISNI - International Standard Name Identifier - isni + 4-grams + 4-grams - Ringgold identifier - rin + 5-grams + 5-grams - Research Organization Registry - ror + n-grams + n-grams - Other - + rules + rules + + + other + other - + - N/A - + Yes + true - Is a Funding Organization - FundingOrganization + No + false - Hidden @@ -2153,6 +3207,13 @@ + + + teachingMaterials + teachingMaterials + + + - true - twobox + autocomplete Enter appropriate subject keywords or phrases. srsc diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java index 855a80c4a3af..8021e4e0d771 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java @@ -164,6 +164,7 @@ private SubmissionFormFieldRest getField(DCInput dcinput, String formName) { inputField.setSelectableMetadata(selectableMetadata); inputField.setTypeBind(dcinput.getTypeBindList()); inputField.setComplexDefinition(dcinput.getComplexDefinitionJSONString()); + inputField.setAutocompleteCustom(dcinput.getAutocompleteCustom()); } if (dcinput.isRelationshipField()) { selectableRelationship = getSelectableRelationships(dcinput); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java index 28a67730ea64..efafa5927e8a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java @@ -92,6 +92,13 @@ public class SubmissionFormFieldRest { * ComplexDefinition transformed to the JSON string */ 
private String complexDefinition; + + /** + * Autocomplete custom field. Give suggestions from this specific autocomplete solr index/file. + */ + private String autocompleteCustom; + + /** * Getter for {@link #selectableMetadata} * @@ -298,4 +305,12 @@ public void setComplexDefinition(String complexDefinition) { public String getComplexDefinition() { return this.complexDefinition; } + + public String getAutocompleteCustom() { + return autocompleteCustom; + } + + public void setAutocompleteCustom(String autocompleteCustom) { + this.autocompleteCustom = autocompleteCustom; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataValueRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataValueRestRepository.java index 7174dae9f12f..d192abb7b6c7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataValueRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataValueRestRepository.java @@ -10,16 +10,7 @@ import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedList; import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Function; -import java.util.function.Predicate; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; @@ -29,6 +20,7 @@ import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.model.MetadataValueWrapper; import org.dspace.app.rest.model.MetadataValueWrapperRest; +import org.dspace.app.rest.utils.Utils; import org.dspace.content.MetadataField; import org.dspace.content.MetadataValue; import org.dspace.content.service.ItemService; @@ -51,7 +43,7 @@ /** * This is the repository responsible 
to manage MetadataValueWrapper Rest object. * - * @author Milan Majchrak (milan.majchrak at dataquest.sk) + * @author Milan Majchrak (dspace at dataquest.sk) */ @Component(MetadataValueWrapperRest.CATEGORY + "." + MetadataValueWrapperRest.NAME) public class MetadataValueRestRepository extends DSpaceRestRepository { @@ -122,31 +114,17 @@ public Page findByValue(@Parameter(value = "schema", r // Find matches in Solr Search core DiscoverQuery discoverQuery = - this.createDiscoverQuery(metadataField, searchValue, pageable); + this.createDiscoverQuery(metadataField, searchValue); if (ObjectUtils.isEmpty(discoverQuery)) { throw new IllegalArgumentException("Cannot create a DiscoverQuery from the arguments."); } - // regex if searchValue consist of numbers and characters - // \d - digit - String regexNumber = "(.)*(\\d)(.)*"; - // \D - non digit - String regexString = "(.)*(\\D)(.)*"; - Pattern patternNumber = Pattern.compile(regexNumber); - Pattern patternString = Pattern.compile(regexString); - // if the searchValue is mixed with numbers and characters the Solr ignore numbers by default - // divide the characters and numbers from searchValue to the separate queries and from separate queries - // create one complex query - if (patternNumber.matcher(searchValue).matches() && patternString.matcher(searchValue).matches()) { - List characterList = this.extractCharacterListFromString(searchValue); - List numberList = this.extractNumberListFromString(searchValue); - - String newQuery = this.composeQueryWithNumbersAndChars(metadataField, characterList, numberList); - discoverQuery.setQuery(newQuery); + String normalizedQuery = Utils.normalizeDiscoverQuery(searchValue, metadataField); + if (StringUtils.isNotBlank(normalizedQuery)) { + discoverQuery.setQuery(normalizedQuery); } - List metadataValueWrappers = new ArrayList<>(); try { DiscoverResult searchResult = searchService.search(context, discoverQuery); @@ -183,40 +161,6 @@ public Page findByValue(@Parameter(value = "schema", 
r return converter.toRestPage(metadataValueWrappers, pageable, utils.obtainProjection()); } - /** - * From searchValue get all String values which are separated by the number to the List of Strings. - * @param searchValue e.g. 'my1Search2' - * @return e.g. [my, Search] - */ - private List extractCharacterListFromString(String searchValue) { - List characterList = null; - // get characters from searchValue as List - searchValue = searchValue.replaceAll("[0-9]", " "); - characterList = new LinkedList<>(Arrays.asList(searchValue.split(" "))); - // remove empty characters from the characterList - characterList.removeIf(characters -> characters == null || "".equals(characters)); - - return characterList; - } - - /** - * From searchValue get all number values which are separated by the number to the List of Strings. - * @param searchValue e.g. 'my1Search2' - * @return e.g. [1, 2] - */ - private List extractNumberListFromString(String searchValue) { - List numberList = new ArrayList<>(); - - // get numbers from searchValue as List - Pattern numberRegex = Pattern.compile("-?\\d+"); - Matcher numberMatcher = numberRegex.matcher(searchValue); - while (numberMatcher.find()) { - numberList.add(numberMatcher.group()); - } - - return numberList; - } - public List filterEUSponsors(List metadataWrappers) { return metadataWrappers.stream().filter(m -> !m.getMetadataValue().getValue().contains("info:eu-repo")) .collect(Collectors.toList()); @@ -224,51 +168,14 @@ public List filterEUSponsors(List me public List distinctMetadataValues(List metadataWrappers) { return metadataWrappers.stream().filter( - distinctByKey(metadataValueWrapper -> metadataValueWrapper.getMetadataValue().getValue()) ) + Utils.distinctByKey(metadataValueWrapper -> metadataValueWrapper.getMetadataValue().getValue()) ) .collect( Collectors.toList() ); } - /** - * From list of String and list of Numbers create a query for the SolrQuery. - * @param metadataField e.g. 
`dc.contributor.author` - * @param characterList e.g. [my, Search] - * @param numberList e.g. [1, 2] - * @return "dc.contributor.author:*my* AND dc.contributor.author:*Search* AND dc.contributor.author:*1* AND ..." - */ - private String composeQueryWithNumbersAndChars(String metadataField, List characterList, - List numberList) { - this.addQueryTemplateToList(metadataField, characterList); - this.addQueryTemplateToList(metadataField, numberList); - - String joinedChars = String.join(" AND ", characterList); - String joinedNumbers = String.join(" AND ", numberList); - return joinedChars + " AND " + joinedNumbers; - - } - - /** - * Add SolrQuery template to the every item of the List - * @param metadataField e.g. `dc.contributor.author` - * @param stringList could be List of String or List of Numbers which are in the String format because of Solr - * e.g. [my, Search] - * @return [dc.contributor.author:*my*, dc.contributor.author:*Search*] - */ - private List addQueryTemplateToList(String metadataField, List stringList) { - String template = metadataField + ":" + "*" + " " + "*"; - - AtomicInteger index = new AtomicInteger(); - stringList.forEach(characters -> { - String queryString = template.replaceAll(" ", characters); - stringList.set(index.getAndIncrement(), queryString); - }); - return stringList; - } - - private DiscoverQuery createDiscoverQuery(String metadataField, String searchValue, Pageable pageable) { + private DiscoverQuery createDiscoverQuery(String metadataField, String searchValue) { DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.setQuery(metadataField + ":" + "*" + searchValue + "*"); - discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); - discoverQuery.setMaxResults(pageable.getPageSize()); + discoverQuery.setMaxResults(500); // return only metadata field values discoverQuery.addSearchField(metadataField); discoverQuery.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE); @@ -286,14 +193,6 @@ private List 
convertMetadataValuesToWrappers(List Predicate distinctByKey(Function keyExtractor) { - Map map = new ConcurrentHashMap<>(); - return t -> map.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null; - } @Override @PreAuthorize("permitAll()") diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SuggestionRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SuggestionRestController.java new file mode 100644 index 000000000000..b0f5c051979d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SuggestionRestController.java @@ -0,0 +1,400 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.BadRequestException; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; +import org.dspace.app.rest.model.VocabularyEntryRest; +import org.dspace.app.rest.model.hateoas.VocabularyEntryResource; +import org.dspace.app.rest.utils.Utils; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.indexobject.IndexableItem; +import 
org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.io.ClassPathResource; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.PagedModel; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +/** + * Returns VocabularyEntries that contain searchValue. The search is performed on the specific index that is defined by + * the `autocompleteCustom` parameter in the `submission-forms.xml`. + * + * @author Milan Majchrak (dspace at dataquest.sk) + */ +@RestController +@RequestMapping("/api/suggestions") +public class SuggestionRestController extends AbstractDSpaceRestRepository { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SuggestionRestController.class); + + + /** + * Prefix for the configuration that defines the separator for the autocompleteCustom parameter. + */ + private static final String AUTOCOMPLETE_CUSTOM_CFG_FORMAT_PREFIX = "autocomplete.custom.separator."; + + /** + * Solr prefix for the autocompleteCustom parameter that define the source of the suggestions. + */ + private static final String AUTOCOMPLETE_CUSTOM_SOLR_PREFIX = "solr-"; + + /** + * Json file prefix for the autocompleteCustom parameter that define the source of the suggestions. + */ + private static final String AUTOCOMPLETE_CUSTOM_JSON_PREFIX = "json_static-"; + + /** + * Query parameter from the autocompleteCustom parameter that define specific query for the Solr search. 
+ */ + private static final String AUTOCOMPLETE_CUSTOM_SOLR_QUERY_PARAM = "query="; + + /** + * Limit of suggestions that will be returned from the JSON file. The limit is used to prevent + * the loading of a large amount of data from the JSON file. + */ + private static final int JSON_SUGGESTIONS_LIMIT = 8; + + @Autowired + private SearchService searchService; + + @Autowired + private ConfigurationService configurationService; + + /** + * Map that contains loaded JSON suggestions. The key is the autocompleteCustom parameter and the value is the + * loaded JSON data. The JSON data is loaded only once and stored in the map for further use. + */ + Map jsonSuggestions = new HashMap<>(); + + /** + * Returns a list of VocabularyEntryRest objects that contain values that contain searchValue. + * The search is performed on the specific index or a specific json file that is defined + * by the autocompleteCustom parameter. + */ + @PreAuthorize("hasAuthority('AUTHENTICATED')") + @RequestMapping(method = RequestMethod.GET) + public PagedModel filter(@Nullable HttpServletRequest request, + @Nullable Pageable optionalPageable, + @RequestParam(name = "autocompleteCustom", required = false) + String autocompleteCustom, + @RequestParam(name = "searchValue", required = false) + String searchValue, + PagedResourcesAssembler assembler) throws SearchServiceException { + // If the searching for the autocompleteCustom parameter is not allowed, return an error + if (!isAllowedSearching(autocompleteCustom)) { + String errorMessage = "Searching for autocompleteCustom: " + autocompleteCustom + " is not allowed"; + log.warn(errorMessage); + throw new BadRequestException(errorMessage); + } + + Pageable pageable = utils.getPageable(optionalPageable); + List results; + // Load suggestions from the specific source (Solr or JSON) + if (autocompleteCustom.startsWith(AUTOCOMPLETE_CUSTOM_JSON_PREFIX)) { + results = getSuggestions(autocompleteCustom, searchValue, AUTOCOMPLETE_CUSTOM_JSON_PREFIX); + } 
else if (autocompleteCustom.startsWith(AUTOCOMPLETE_CUSTOM_SOLR_PREFIX)) { + results = getSuggestions(autocompleteCustom, searchValue, AUTOCOMPLETE_CUSTOM_SOLR_PREFIX); + } else { + log.warn("Cannot fetch suggestions for autocompleteCustom: {} with searching value: {}", + autocompleteCustom, searchValue); + // Return empty list + results = new ArrayList<>(0); + } + + // If no results are found, return null + if (CollectionUtils.isEmpty(results)) { + log.info("No suggestions found for autocompleteCustom: {} with searching value: {}", + autocompleteCustom, searchValue); + } + + // Remove duplicates from the results + List finalResults = results.stream() + .filter(Utils.distinctByKey(VocabularyEntryRest::getValue)) + .collect(Collectors.toList()); + + // Remove `?query` from the autocompleteCustom parameter if it contains this specific query parameter + String autocompleteCustomWithoutQuery = updateAutocompleteAndQuery(autocompleteCustom, null); + // Format the values according to the configuration + finalResults = finalResults.stream() + .map(ver -> formatValue(ver, autocompleteCustomWithoutQuery)) + .collect(Collectors.toList()); + + // Create a page with the final results. The page is needed for the better processing in the frontend. + Page resultsPage = new PageImpl<>(finalResults, pageable, finalResults.size()); + PagedModel response = assembler.toModel(resultsPage); + return response; + } + + /** + * Returns a list of VocabularyEntryRest objects which contain values with searching value. + * The search is performed on the specific index or json file that is defined by the autocompleteCustom parameter. 
+ */ + private List getSuggestions(String autocompleteCustom, String searchValue, String prefix) + throws SearchServiceException { + // Remove the prefix from the autocompleteCustom parameter + String normalizedAutocompleteCustom = removeAutocompleteCustomPrefix(prefix, autocompleteCustom); + // Normalize the search value - remove leading and trailing whitespaces + String normalizedSearchValue = searchValue.trim(); + // Create a list of VocabularyEntryRest objects that will be filtered from duplicate values and returned + // as a response. + List results = new ArrayList<>(); + + if (prefix.equals(AUTOCOMPLETE_CUSTOM_SOLR_PREFIX)) { + // Load suggestions from Solr + results = loadSuggestionsFromSolr(normalizedAutocompleteCustom, normalizedSearchValue, results); + } else if (prefix.equals(AUTOCOMPLETE_CUSTOM_JSON_PREFIX)) { + // Load suggestions from JSON + results = loadSuggestionsFromJson(normalizedAutocompleteCustom, normalizedSearchValue, results); + } + + return results; + + } + + /** + * Load suggestions from the JSON file. The JSON file is loaded only once and stored in the map for further use. + * The search is performed on the specific key in the JSON file. The key is the autocompleteCustom parameter. + */ + private List loadSuggestionsFromJson(String autocompleteCustom, String searchValue, + List results) { + try { + // Load the JSON data from the file. + JsonNode jsonData; + if (!jsonSuggestions.containsKey(autocompleteCustom)) { + // Load the JSON data from the file and store it in the map for further use. 
+ JsonNode loadedJsonSuggestions = loadJsonFromFile(autocompleteCustom); + jsonData = loadedJsonSuggestions; + jsonSuggestions.put(autocompleteCustom, loadedJsonSuggestions); + } else { + // Get the JSON data from the map + jsonData = jsonSuggestions.get(autocompleteCustom); + } + + if (jsonData == null) { + log.warn("Cannot load JSON suggestions from file: {}", autocompleteCustom); + return results; + } + + // Search for a specific key + results = searchByKey(jsonData, searchValue, results); + + } catch (IOException e) { + log.error("Error while loading JSON suggestions from file: {} because: {}", autocompleteCustom, + e.getMessage()); + } + return results; + } + + /** + * Load suggestions from Solr. The search is performed on the specific index that is defined by the + * autocompleteCustom parameter. + */ + private List loadSuggestionsFromSolr(String autocompleteCustom, String searchValue, + List results) + throws SearchServiceException { + Context context = obtainContext(); + // Create a DiscoverQuery object that will be used to search for the results. + DiscoverQuery discoverQuery = new DiscoverQuery(); + // Process the custom query if it contains the specific query parameter `?query=` + autocompleteCustom = updateAutocompleteAndQuery(autocompleteCustom, discoverQuery); + // TODO - search facets and process facet results instead of indexable objects + discoverQuery.setMaxResults(500); + // return only metadata field values + discoverQuery.addSearchField(autocompleteCustom); + + String normalizedQuery = Utils.normalizeDiscoverQuery(searchValue, autocompleteCustom); + if (StringUtils.isNotBlank(normalizedQuery)) { + discoverQuery.setQuery(normalizedQuery); + } + + // Search for the results + DiscoverResult searchResult = searchService.search(context, discoverQuery); + + // Iterate over all indexable objects in the search result. We need indexable object to get search documents. + // Each search document contains values from the specific index. 
+ processSolrSearchResults(searchResult, autocompleteCustom, searchValue, results); + + return results; + } + + /** + * Process the search results from Solr. The search results are processed and filtered according to the searchValue. + */ + private void processSolrSearchResults(DiscoverResult searchResult, String autocompleteCustom, String searchValue, + List results) { + searchResult.getIndexableObjects().forEach(object -> { + if (!(object instanceof IndexableItem)) { + return; + } + IndexableItem item = (IndexableItem) object; + // Get all search documents for the item. + searchResult.getSearchDocument(item).forEach((searchDocument) -> { + VocabularyEntryRest vocabularyEntryRest = new VocabularyEntryRest(); + // All values from Item's specific index - it could contain values we are not looking for. + // The must be filtered out. + List docValues = searchDocument.getSearchFieldValues(autocompleteCustom); + + // Filter values that contain searchValue + List filteredValues = docValues.stream() + .filter(value -> value.contains(searchValue)) + .collect(Collectors.toList()); + + // Add filtered values to the results. It contains only values that contain searchValue. + filteredValues.forEach(value -> { + vocabularyEntryRest.setDisplay(value); + vocabularyEntryRest.setValue(value); + results.add(vocabularyEntryRest); + }); + }); + }); + } + + /** + * Process the custom query if it contains the specific query parameter `?query=`. + * The query is processed and set to the DiscoverQuery object. + * The method returns the part before the query parameter as the new autocompleteCustom parameter. + * @param discoverQuery could be null + */ + private String updateAutocompleteAndQuery(String autocompleteCustom, DiscoverQuery discoverQuery) { + if (!autocompleteCustom.contains(AUTOCOMPLETE_CUSTOM_SOLR_QUERY_PARAM)) { + return autocompleteCustom; + } + + // Query parameter starts with `?` + String[] parts = autocompleteCustom.split("\\?" 
+ AUTOCOMPLETE_CUSTOM_SOLR_QUERY_PARAM); + // 2 parts are expected - the part before the query parameter and after the query parameter + if (parts.length == 2) { + if (discoverQuery != null) { + discoverQuery.setQuery(parts[1]); + } + return parts[0]; // Return the part before "?query=" + } + + return autocompleteCustom; + } + + /** + * Load JSON data from the file. The JSON data is loaded from the resources' folder. + */ + public JsonNode loadJsonFromFile(String filePath) throws IOException { + // Load the file from the resources folder + ClassPathResource resource = new ClassPathResource(filePath); + + // Use Jackson ObjectMapper to read the JSON file + ObjectMapper objectMapper = new ObjectMapper(); + return objectMapper.readTree(resource.getInputStream()); + } + + /** + * Search for the specific key in the JSON object. The search is performed on the specific key in the JSON object. + * The key is the autocompleteCustom parameter. + */ + public List searchByKey(JsonNode jsonNode, String searchKey, + List results) { + // Iterate over all fields (keys) in the JSON object + Iterator fieldNames = jsonNode.fieldNames(); + while (fieldNames.hasNext() && results.size() < JSON_SUGGESTIONS_LIMIT) { + String key = fieldNames.next(); + + // If the key matches or contains the search term (case-insensitive) + if (key.toLowerCase().contains(searchKey.toLowerCase())) { + // Add key-value pair to the result + VocabularyEntryRest vocabularyEntryRest = new VocabularyEntryRest(); + vocabularyEntryRest.setDisplay(key); + vocabularyEntryRest.setValue(jsonNode.get(key).asText()); + results.add(vocabularyEntryRest); + } + } + return results; + } + + /** + * Format the value according to the configuration. + * The result value could consist of multiple parts separated by a separator. Keep the correct part separated by + * the separator loaded from the configuration. 
+ */ + private VocabularyEntryRest formatValue(VocabularyEntryRest ver, String autocompleteCustom) { + if (StringUtils.isEmpty(ver.getValue()) || StringUtils.isEmpty(autocompleteCustom)) { + return ver; + } + + // Load separator from the configuration `autocomplete.custom.separator. + String separator = configurationService.getProperty(AUTOCOMPLETE_CUSTOM_CFG_FORMAT_PREFIX + autocompleteCustom); + if (StringUtils.isEmpty(separator)) { + return ver; + } + + // Split the value by the separator and keep the correct - second part + String[] parts = ver.getValue().split(separator); + // Check the length of the parts - the correct value is the second part + if (parts.length > 1) { + String formattedValue = parts[1].trim(); // The correct value is the second part + ver.setValue(formattedValue); + ver.setDisplay(formattedValue); + } + + return ver; + } + + /** + * Remove the prefix from the autocompleteCustom parameter. E.g. remove "solr-" or "json_static-". + */ + private String removeAutocompleteCustomPrefix(String prefix, String autocompleteCustom) { + return autocompleteCustom.replace(prefix, ""); + } + + /** + * Check if the autocompleteCustom parameter is allowed to be searched. + * To allow searching, the `autocomplete.custom.allowed` property must be defined in the configuration. 
+ */ + private boolean isAllowedSearching(String autocompleteCustom) { + // Check if the autocompleteCustom parameter is allowed to be searched + String[] allowedAutocompleteCustom = configurationService.getArrayProperty("autocomplete.custom.allowed", + new String[0]); + + // Remove `?query` from the autocompleteCustom parameter if it contains this specific query parameter + String normalizedAutocompleteCustom = updateAutocompleteAndQuery(autocompleteCustom, null); + + // If the allowedAutocompleteCustom parameter is not defined, return false + if (Objects.isNull(allowedAutocompleteCustom)) { + return false; + } + + // Convert the allowedAutocompleteCustom array to a list + List allowedAutocompleteCustomList = Arrays.asList(allowedAutocompleteCustom); + return allowedAutocompleteCustomList.contains(normalizedAutocompleteCustom); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java index 347a23b86de5..4d47e6484b64 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java @@ -48,6 +48,12 @@ import java.util.Set; import java.util.TreeSet; import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import javax.annotation.Nullable; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; @@ -1133,4 +1139,109 @@ public static String encodeNonAsciiCharacters(String input) { } return result.toString(); } + + /** + * Update the solr DiscoverQuery because in some cases it won't search properly numbers and characters together. 
+ * @param searchValue searching value + * @param searchField it could be special solr index or metadata field + * @return updated DiscoverQuery + */ + public static String normalizeDiscoverQuery(String searchValue, + String searchField) { + // regex if searchValue consist of numbers and characters + // \d - digit + String regexNumber = "(.)*(\\d)(.)*"; + // \D - non digit + String regexString = "(.)*(\\D)(.)*"; + Pattern patternNumber = Pattern.compile(regexNumber); + Pattern patternString = Pattern.compile(regexString); + // if the searchValue is mixed with numbers and characters the Solr ignore numbers by default + // divide the characters and numbers from searchValue to the separate queries and from separate queries + // create one complex query + if (patternNumber.matcher(searchValue).matches() && patternString.matcher(searchValue).matches()) { + List characterList = extractCharacterListFromString(searchValue); + List numberList = extractNumberListFromString(searchValue); + return composeQueryWithNumbersAndChars(searchField, characterList, numberList); + } + return null; + } + + /** + * From searchValue get all number values which are separated by the number to the List of Strings. + * @param searchValue e.g. 'my1Search2' + * @return e.g. [1, 2] + */ + private static List extractNumberListFromString(String searchValue) { + List numberList = new ArrayList<>(); + + // get numbers from searchValue as List + Pattern numberRegex = Pattern.compile("-?\\d+"); + Matcher numberMatcher = numberRegex.matcher(searchValue); + while (numberMatcher.find()) { + numberList.add(numberMatcher.group()); + } + + return numberList; + } + + /** + * From searchValue get all String values which are separated by the number to the List of Strings. + * @param searchValue e.g. 'my1Search2' + * @return e.g. 
[my, Search] + */ + private static List extractCharacterListFromString(String searchValue) { + List characterList = null; + // get characters from searchValue as List + searchValue = searchValue.replaceAll("[0-9]", " "); + characterList = new LinkedList<>(Arrays.asList(searchValue.split(" "))); + // remove empty characters from the characterList + characterList.removeIf(characters -> characters == null || "".equals(characters)); + + return characterList; + } + + /** + * From list of String and list of Numbers create a query for the SolrQuery. + * @param metadataField e.g. `dc.contributor.author` + * @param characterList e.g. [my, Search] + * @param numberList e.g. [1, 2] + * @return "dc.contributor.author:*my* AND dc.contributor.author:*Search* AND dc.contributor.author:*1* AND ..." + */ + private static String composeQueryWithNumbersAndChars(String metadataField, List characterList, + List numberList) { + addQueryTemplateToList(metadataField, characterList); + addQueryTemplateToList(metadataField, numberList); + + String joinedChars = String.join(" AND ", characterList); + String joinedNumbers = String.join(" AND ", numberList); + return joinedChars + " AND " + joinedNumbers; + + } + + /** + * Add SolrQuery template to the every item of the List + * @param metadataField e.g. `dc.contributor.author` + * @param stringList could be List of String or List of Numbers which are in the String format because of Solr + * e.g. 
[my, Search] + * @return [dc.contributor.author:*my*, dc.contributor.author:*Search*] + */ + private static List addQueryTemplateToList(String metadataField, List stringList) { + String template = metadataField + ":" + "*" + " " + "*"; + + AtomicInteger index = new AtomicInteger(); + stringList.forEach(characters -> { + String queryString = template.replaceAll(" ", characters); + stringList.set(index.getAndIncrement(), queryString); + }); + return stringList; + } + + /** + * Filter unique values from the list and return list with unique values + * @return List with unique values + */ + public static Predicate distinctByKey(Function keyExtractor) { + Map map = new ConcurrentHashMap<>(); + return t -> map.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null; + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index 57a8dbb3c2b7..e1f9788a517e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -1422,7 +1422,7 @@ public void testBrowseByEntriesStartsWith() throws Exception { //Verify that the index filters to the "Universe" entries and Counts 2 Items. .andExpect(jsonPath("$._embedded.entries", - contains(BrowseEntryResourceMatcher.matchBrowseEntry("Universe", 2) + contains(BrowseEntryResourceMatcher.matchBrowseEntry("Universe".toLowerCase(), 2) ))) //Verify startsWith parameter is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=U"))); @@ -1446,7 +1446,8 @@ public void testBrowseByEntriesStartsWith() throws Exception { //Verify that the index filters to the "Turing, Alan'" items. 
.andExpect(jsonPath("$._embedded.entries", - contains(BrowseEntryResourceMatcher.matchBrowseEntry("Turing, Alan Mathison", 1) + contains(BrowseEntryResourceMatcher.matchBrowseEntry( + "Turing, Alan Mathison".toLowerCase(), 1) ))) //Verify that the startsWith paramater is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=T"))); @@ -1469,7 +1470,8 @@ public void testBrowseByEntriesStartsWith() throws Exception { //Verify that the index filters to the "Computing'" items. .andExpect(jsonPath("$._embedded.entries", - contains(BrowseEntryResourceMatcher.matchBrowseEntry("Computing", 3) + contains(BrowseEntryResourceMatcher.matchBrowseEntry( + "Computing".toLowerCase(), 3) ))) //Verify that the startsWith paramater is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=C"))); @@ -1544,9 +1546,12 @@ public void testBrowseByEntriesStartsWithAndDiacritics() throws Exception { //Verify that the index filters to the "Alonso, Nombre", "Álvarez, Nombre" and "Azuaga, Nombre" // and diacritics are ignored in sorting .andExpect(jsonPath("$._embedded.entries", - contains(BrowseEntryResourceMatcher.matchBrowseEntry("Alonso, Nombre", 1), - BrowseEntryResourceMatcher.matchBrowseEntry("Álvarez, Nombre", 1), - BrowseEntryResourceMatcher.matchBrowseEntry("Azuaga, Nombre", 1) + contains(BrowseEntryResourceMatcher.matchBrowseEntry( + "Alonso, Nombre".toLowerCase(), 1), + BrowseEntryResourceMatcher.matchBrowseEntry( + "Álvarez, Nombre".toLowerCase(), 1), + BrowseEntryResourceMatcher.matchBrowseEntry( + "Azuaga, Nombre".toLowerCase(), 1) ))) //Verify startsWith parameter is included in the links @@ -1570,8 +1575,10 @@ public void testBrowseByEntriesStartsWithAndDiacritics() throws Exception { //Verify that the index filters to the "Ögren, Name"" and "Ortiz, Nombre" .andExpect(jsonPath("$._embedded.entries", - contains(BrowseEntryResourceMatcher.matchBrowseEntry("Ögren, Name", 1), - 
BrowseEntryResourceMatcher.matchBrowseEntry("Ortiz, Nombre", 1) + contains(BrowseEntryResourceMatcher.matchBrowseEntry( + "Ögren, Name".toLowerCase(), 1), + BrowseEntryResourceMatcher.matchBrowseEntry( + "Ortiz, Nombre".toLowerCase(), 1) ))) //Verify that the startsWith paramater is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=Ó"))); @@ -1596,9 +1603,12 @@ public void testBrowseByEntriesStartsWithAndDiacritics() throws Exception { //Verify that the index filters to the "Telecomunicaciones', "Teléfono" and "Televisor" and // it is sorted ignoring diacritics .andExpect(jsonPath("$._embedded.entries", - contains(BrowseEntryResourceMatcher.matchBrowseEntry("Telecomunicaciones", 1), - BrowseEntryResourceMatcher.matchBrowseEntry("Teléfono", 1), - BrowseEntryResourceMatcher.matchBrowseEntry("Televisor", 1) + contains(BrowseEntryResourceMatcher.matchBrowseEntry( + "Telecomunicaciones".toLowerCase(), 1), + BrowseEntryResourceMatcher.matchBrowseEntry( + "Teléfono".toLowerCase(), 1), + BrowseEntryResourceMatcher.matchBrowseEntry( + "Televisor".toLowerCase(), 1) ))) //Verify that the startsWith paramater is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=Tele"))); @@ -1621,7 +1631,7 @@ public void testBrowseByEntriesStartsWithAndDiacritics() throws Exception { //Verify that the index filters to the "Guion" .andExpect(jsonPath("$._embedded.entries", - contains(BrowseEntryResourceMatcher.matchBrowseEntry("Guion", 1) + contains(BrowseEntryResourceMatcher.matchBrowseEntry("Guion".toLowerCase(), 1) ))) //Verify that the startsWith paramater is included in the links .andExpect(jsonPath("$._links.self.href", containsString("?startsWith=Guión"))); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoveryRestControllerIT.java index e1361a023b0c..cd2829dc626a 100644 --- 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ClarinDiscoveryRestControllerIT.java @@ -358,8 +358,8 @@ public void discoverFacetsAuthorTestWithPrefix() throws Exception { // up in different items //These authors are order according to count. Only two show up because of the prefix. .andExpect(jsonPath("$._embedded.values", containsInAnyOrder( - FacetValueMatcher.entryAuthor("Smith, Maria"), - FacetValueMatcher.entryAuthor("Smith, Donald") + FacetValueMatcher.entryAuthor("Smith, Maria".toLowerCase()), + FacetValueMatcher.entryAuthor("Smith, Donald".toLowerCase()) ))) ; } @@ -1003,7 +1003,8 @@ public void discoverSearchTest() throws Exception { SearchFilterMatcher.clarinItemsCommunityFilter(), SearchFilterMatcher.clarinItemsTypeFilter(), SearchFilterMatcher.clarinSubjectFirstValueFilter(), - SearchFilterMatcher.clarinDataProviderFacet() + SearchFilterMatcher.clarinDataProviderFacet(), + SearchFilterMatcher.dcTypeFilter() ))) //These sortOptions need to be present as it's the default in the configuration .andExpect(jsonPath("$.sortOptions", contains( diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SuggestionRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SuggestionRestControllerIT.java new file mode 100644 index 000000000000..a449bfd29bc5 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SuggestionRestControllerIT.java @@ -0,0 +1,176 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.is; +import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Integration test for the {@link org.dspace.app.rest.repository.SuggestionRestController} + * + * @author Milan Majchrak (dspace at dataquest.sk) + */ +public class SuggestionRestControllerIT extends AbstractControllerIntegrationTest { + + private Item publicItem; + private Collection col; + private final String SUBJECT_SEARCH_VALUE = "test subject"; + private final String LANGUAGE_SEARCH_VALUE_KEY = "Alumu-Tesu"; + private final String LANGUAGE_SEARCH_VALUE_VALUE = "aab"; + private final String ITEM_TITLE = "Item title"; + + @Before + public void setup() throws Exception { + context.turnOffAuthorisationSystem(); + // 1. A community-collection structure with one parent community and one collection + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection").build(); + + // 2. 
Create item and add it to the collection + publicItem = ItemBuilder.createItem(context, col) + .withTitle(ITEM_TITLE) + .withMetadata("dc", "subject", null, SUBJECT_SEARCH_VALUE ) + .build(); + + context.restoreAuthSystemState(); + } + + /** + * Should return formatted suggestions in the VocabularyEntryRest objects + */ + @Test + public void testSearchBySubjectAcSolrIndex() throws Exception { + String userToken = getAuthToken(eperson.getEmail(), password); + // substring = find only by the `test` value + getClient(userToken).perform(get("/api/suggestions?autocompleteCustom=solr-subject_ac&searchValue=" + + SUBJECT_SEARCH_VALUE.substring(0, 4))) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.vocabularyEntryRests", Matchers.hasItem( + allOf( + hasJsonPath("$.display", is(SUBJECT_SEARCH_VALUE)), + hasJsonPath("$.value", is(SUBJECT_SEARCH_VALUE)), + hasJsonPath("$.type", is("vocabularyEntry")) + )))); + } + + /** + * Should return no suggestions + */ + @Test + public void testSearchBySubjectAcSolrIndex_noResults() throws Exception { + String userToken = getAuthToken(eperson.getEmail(), password); + // substring = find only by the `test` value + getClient(userToken).perform(get("/api/suggestions?autocompleteCustom=solr-subject_ac&searchValue=" + + "no such subject")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))) + .andExpect(jsonPath("$._embedded.vocabularyEntryRests").doesNotExist()); + } + + /** + * Should return suggestions from the JSON file + */ + @Test + public void testSearchByLanguageFromJson() throws Exception { + String userToken = getAuthToken(eperson.getEmail(), password); + getClient(userToken).perform( + get("/api/suggestions?autocompleteCustom=json_static-iso_langs.json&searchValue=" + + LANGUAGE_SEARCH_VALUE_KEY)) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + 
.andExpect(jsonPath("$.page.totalElements", is(1))) + .andExpect(jsonPath("$._embedded.vocabularyEntryRests", Matchers.hasItem( + allOf( + hasJsonPath("$.display", is(LANGUAGE_SEARCH_VALUE_KEY)), + hasJsonPath("$.value", is(LANGUAGE_SEARCH_VALUE_VALUE)), + hasJsonPath("$.type", is("vocabularyEntry")) + )))); + } + + /** + * Should return no suggestions from the JSON file + */ + @Test + public void testSearchByLanguageFromJson_noResults() throws Exception { + String userToken = getAuthToken(eperson.getEmail(), password); + getClient(userToken).perform( + get("/api/suggestions?autocompleteCustom=json_static-iso_langs.json&searchValue=" + + "no such language")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))) + .andExpect(jsonPath("$._embedded.vocabularyEntryRests").doesNotExist()); + } + + /** + * Should return suggestions from the solr `title_ac` index. + * Compose specific query from the definition and the search value. + */ + @Test + public void testSearchBySpecificQueryFromSolr() throws Exception { + String userToken = getAuthToken(eperson.getEmail(), password); + getClient(userToken).perform( + get("/api/suggestions?autocompleteCustom=solr-title_ac?query=title_ac:**&searchValue=" + + ITEM_TITLE.substring(0, 4))) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(1))) + .andExpect(jsonPath("$._embedded.vocabularyEntryRests", Matchers.hasItem( + allOf( + hasJsonPath("$.display", is(ITEM_TITLE)), + hasJsonPath("$.value", is(ITEM_TITLE)), + hasJsonPath("$.type", is("vocabularyEntry")) + )))); + } + + /** + * Should return suggestions from the solr `title_ac` index. + * Compose specific query from the definition and the search value. 
+ */ + @Test + public void testSearchBySpecificQueryFromSolr_noresults() throws Exception { + String userToken = getAuthToken(eperson.getEmail(), password); + getClient(userToken).perform( + get("/api/suggestions?autocompleteCustom=solr-title_ac?query=title_ac:**&searchValue=" + + "no such title")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.page.totalElements", is(0))) + .andExpect(jsonPath("$._embedded.vocabularyEntryRests").doesNotExist()); + } + + /** + * Should return 401 Forbidden + */ + @Test + public void testShouldNotAuthorized() throws Exception { + getClient().perform(get("/api/suggestions?autocompleteCustom=solr-title_ac?query=title_ac:**&searchValue=" + + "no such title")) + .andExpect(status().isUnauthorized()); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java index 23d66754dcc6..939873b5c4ba 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java @@ -131,6 +131,16 @@ public static Matcher clarinLicenseRightsFacet(boolean hasNext) ); } + public static Matcher dcTypeFacet(boolean hasNext) { + return allOf( + hasJsonPath("$.name", is("dctype")), + hasJsonPath("$.facetType", is("text")), + hasJsonPath("$.facetLimit", any(Integer.class)), + hasJsonPath("$._links.self.href", containsString("api/discover/facets/dctype")), + hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/dctype")) + ); + } + public static Matcher clarinItemsLanguageFacet(boolean hasNext) { return allOf( hasJsonPath("$.name", is("language")), @@ -162,11 +172,11 @@ public static Matcher clarinItemsCommunityFacet(boolean hasNext) */ public static Matcher typeFacet(boolean b) { return allOf( - hasJsonPath("$.name", is("itemtype")), + 
hasJsonPath("$.name", is("type")), hasJsonPath("$.facetType", is("text")), hasJsonPath("$.facetLimit", any(Integer.class)), - hasJsonPath("$._links.self.href", containsString("api/discover/facets/itemtype")), - hasJsonPath("$._links", matchNextLink(b, "api/discover/facets/itemtype")) + hasJsonPath("$._links.self.href", containsString("api/discover/facets/type")), + hasJsonPath("$._links", matchNextLink(b, "api/discover/facets/type")) ); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SearchFilterMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SearchFilterMatcher.java index b5533a955505..c708ca96b98c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SearchFilterMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SearchFilterMatcher.java @@ -188,7 +188,7 @@ public static Matcher clarinItemsCommunityFilter() { public static Matcher clarinItemsTypeFilter() { return allOf( - hasJsonPath("$.filter", is("itemtype")), + hasJsonPath("$.filter", is("type")), hasJsonPath("$.hasFacets", is(true)), hasJsonPath("$.type", is("text")), hasJsonPath("$.openByDefault", is(false)), @@ -226,4 +226,14 @@ public static Matcher clarinDataProviderFacet() { checkOperators() ); } + + public static Matcher dcTypeFilter() { + return allOf( + hasJsonPath("$.filter", is("dctype")), + hasJsonPath("$.hasFacets", is(false)), + hasJsonPath("$.type", is("text")), + hasJsonPath("$.openByDefault", is(false)), + checkOperators() + ); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/UtilsTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/UtilsTest.java new file mode 100644 index 000000000000..68c521bfaf06 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/UtilsTest.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at 
the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.utils; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.dspace.AbstractUnitTest; +import org.junit.Test; + +/** + * Unit tests for {@link Utils} + * + * @author Milan Majchrak (dspace at dataquest.sk) + */ +public class UtilsTest extends AbstractUnitTest { + + @Test + public void testNormalizeDiscoverQueryWithMixedCharactersAndNumbers() { + String searchValue = "my1Search2"; + String searchField = "dc.contributor.author"; + + String expected = "dc.contributor.author:*my* AND dc.contributor.author:*Search* AND " + + "dc.contributor.author:*1* AND dc.contributor.author:*2*"; + + String result = Utils.normalizeDiscoverQuery(searchValue, searchField); + assertEquals(expected, result); + } + + @Test + public void testNormalizeDiscoverQueryWithOnlyCharacters() { + String searchValue = "mySearch"; + String searchField = "dc.contributor.author"; + + String result = Utils.normalizeDiscoverQuery(searchValue, searchField); + assertNull(result); + } + + @Test + public void testNormalizeDiscoverQueryWithOnlyNumbers() { + String searchValue = "12345"; + String searchField = "dc.contributor.author"; + + String result = Utils.normalizeDiscoverQuery(searchValue, searchField); + assertNull(result); + } + + @Test + public void testNormalizeDiscoverQueryWithEmptyString() { + String searchValue = ""; + String searchField = "dc.contributor.author"; + + String result = Utils.normalizeDiscoverQuery(searchValue, searchField); + assertNull(result); + } +} diff --git a/dspace/config/clarin-dspace.cfg b/dspace/config/clarin-dspace.cfg index 6c00f45b5a1e..708e5c9b1c9b 100644 --- a/dspace/config/clarin-dspace.cfg +++ b/dspace/config/clarin-dspace.cfg @@ -288,3 +288,14 @@ elg.download-location.exposed = 0 # left here for reference #download.email.cc = ${info.recipient} download.email.cc = ${mail.admin} + +#### 
Submission forms #### +# `autocomplete.custom.separator.*` is a special separator used to distinguish values +# in the results from the current Solr index. For example, `solr-subject_ac` is a special index for subjects, +# and its values look like this: `value1 ||| VALUE1`. The correct value is the second one. +# `//` is because of special character +autocomplete.custom.separator.solr-subject_ac = \\|\\|\\| +autocomplete.custom.separator.solr-publisher_ac = \\|\\|\\| +autocomplete.custom.separator.solr-dataProvider_ac = \\|\\|\\| +autocomplete.custom.separator.solr-dctype_ac = \\|\\|\\| +autocomplete.custom.allowed = solr-author_ac,solr-publisher_ac,solr-dataProvider_ac,solr-dctype_ac,solr-subject_ac,solr-handle_title_ac,json_static-iso_langs.json diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index a5cce1ffd2b9..c83e3b011aff 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -200,6 +200,7 @@ + @@ -2545,7 +2546,7 @@ - + dc.type @@ -2557,6 +2558,17 @@ + + + + + dc.type + + + + + + diff --git a/dspace/config/submission-forms.dtd b/dspace/config/submission-forms.dtd index 932494dc7728..01bb8ea60d9f 100644 --- a/dspace/config/submission-forms.dtd +++ b/dspace/config/submission-forms.dtd @@ -81,6 +81,7 @@ + diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index 1a63054ec9d2..e00cf2fb5936 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -85,7 +85,7 @@ author true - clarin-name + autocomplete Enter the author's name (Family name, Given names). true - tag + autocomplete Enter appropriate subject keyword or phrase and press the Add button. You can repeat it for multiple keywords or use separators i.e., Enter and comma, which will split it accordingly. Start typing the keyword and use autocomplete form that will appear. 
End your input by pressing @@ -1703,7 +1703,7 @@ true - autocomplete + autocomplete The name of the publisher of the original analog or born digital object. Use your home institution if this is a born digital object being published now. Start typing the publisher and use autocomplete form that will appear if applicable. End your input by pressing ESC if you @@ -1719,9 +1719,9 @@ false - autocomplete + autocomplete This concerns the digital object (not the analog - original). An institution from which the data come. Used eg. to give proper attribution. Generally + original). An institution from which the data come. Used e.g. to give proper attribution. Generally different from publisher. @@ -1757,7 +1757,7 @@ false - autocomplete + autocomplete The type should be different from what you have entered in the first step. Examples: photo or painting for IMAGE, book or letter for TEXT, etc. Type is required @@ -1787,7 +1787,7 @@ true TEXT - autocomplete + autocomplete Select the language of the main content of the item. Multiple languages are possible. Start typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). @@ -1802,7 +1802,7 @@ true VIDEO,IMAGE,SOUND,3D - autocomplete + autocomplete Optionally, select the language of the main content of the item. Multiple languages are possible. Start typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). @@ -1818,7 +1818,7 @@ true - tag + autocomplete Enter appropriate subject keyword or phrase and press the Add button. Use keywords to specify also people, places and times (period, era, date range etc) the resource is about. You can use hierarchical subjects, separate the hierarchy levels with two colons (::). Eg. 
@@ -1985,7 +1985,7 @@ true - autocomplete + autocomplete The name of the publisher of the original analog or born digital object. Use your home institution if this is a born digital object being published now. Start typing the publisher and use autocomplete form that will appear if applicable. End your input by pressing ESC if you @@ -2052,7 +2052,7 @@ iso true - autocomplete + autocomplete Select the language of the main content of the item. Multiple languages are possible. Start typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). @@ -2067,7 +2067,7 @@ true - autocomplete + autocomplete Enter appropriate subject keyword or phrase and press the Add button. You can repeat it for multiple keywords or use separators i.e., comma and semicolon, which will split it accordingly. Start typing the keyword and use autocomplete form that will appear. End your input by pressing @@ -2116,7 +2116,7 @@ replaces true - onebox + autocomplete URL to a related resource that is supplanted, displaced, or superseded by the described resource. If the replaced resource is in this repository start typing its name or handle and select the resource from the autocomplete popup. @@ -2130,7 +2130,7 @@ isreplacedby true - onebox + autocomplete A related resource that supplants, displaces, or supersedes the described resource. @@ -3225,7 +3225,7 @@ - + diff --git a/dspace/config/submission-forms_cs.xml b/dspace/config/submission-forms_cs.xml index bd06c7fdeac4..227508071cb2 100644 --- a/dspace/config/submission-forms_cs.xml +++ b/dspace/config/submission-forms_cs.xml @@ -85,7 +85,7 @@ author true - clarin-name + autocomplete Enter the author's name (Family name, Given names). true - tag + autocomplete Enter appropriate subject keyword or phrase and press the Add button. 
You can repeat it for multiple keywords or use separators i.e., Enter and comma, which will split it accordingly. Start typing the keyword and use autocomplete form that will appear. End your input by pressing @@ -1703,7 +1703,7 @@ true - autocomplete + autocomplete The name of the publisher of the original analog or born digital object. Use your home institution if this is a born digital object being published now. Start typing the publisher and use autocomplete form that will appear if applicable. End your input by pressing ESC if you @@ -1719,9 +1719,9 @@ false - autocomplete + autocomplete This concerns the digital object (not the analog - original). An institution from which the data come. Used eg. to give proper attribution. Generally + original). An institution from which the data come. Used e.g. to give proper attribution. Generally different from publisher. @@ -1757,7 +1757,7 @@ false - autocomplete + autocomplete The type should be different from what you have entered in the first step. Examples: photo or painting for IMAGE, book or letter for TEXT, etc. Type is required @@ -1771,7 +1771,6 @@ false - aaa textarea Enter a description of the submitted data. Please give us a description @@ -1785,7 +1784,7 @@ true TEXT - autocomplete + autocomplete Select the language of the main content of the item. Multiple languages are possible. Start typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). @@ -1800,7 +1799,7 @@ true VIDEO,IMAGE,SOUND,3D - autocomplete + autocomplete Optionally, select the language of the main content of the item. Multiple languages are possible. Start typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). 
@@ -1816,7 +1815,7 @@ true - tag + autocomplete Enter appropriate subject keyword or phrase and press the Add button. Use keywords to specify also people, places and times (period, era, date range etc) the resource is about. You can use hierarchical subjects, separate the hierarchy levels with two colons (::). Eg. @@ -1983,7 +1982,7 @@ true - autocomplete + autocomplete The name of the publisher of the original analog or born digital object. Use your home institution if this is a born digital object being published now. Start typing the publisher and use autocomplete form that will appear if applicable. End your input by pressing ESC if you @@ -2050,7 +2049,7 @@ iso true - autocomplete + autocomplete Select the language of the main content of the item. Multiple languages are possible. Start typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). @@ -2065,7 +2064,7 @@ true - autocomplete + autocomplete Enter appropriate subject keyword or phrase and press the Add button. You can repeat it for multiple keywords or use separators i.e., comma and semicolon, which will split it accordingly. Start typing the keyword and use autocomplete form that will appear. End your input by pressing @@ -2106,7 +2105,7 @@ - + dc @@ -2114,7 +2113,7 @@ replaces true - onebox + autocomplete URL příbuzného záznamu, který je tímto záznamem nahrazen. Pokud je příbuzný záznam v tomto repozitáři, začněte psát jeho název, nebo handle a vyberte záznam z nabídky. @@ -2128,7 +2127,7 @@ isreplacedby true - onebox + autocomplete Příbuzný záznam, který nahrazuje tento. 
@@ -3223,7 +3222,7 @@ - + diff --git a/dspace/solr/search/conf/schema.xml b/dspace/solr/search/conf/schema.xml index a80cfdedfd34..3a6125c1f9ac 100644 --- a/dspace/solr/search/conf/schema.xml +++ b/dspace/solr/search/conf/schema.xml @@ -126,6 +126,7 @@ + @@ -140,6 +141,7 @@ + - + @@ -194,11 +197,17 @@ - + + + + + + + - + - + @@ -361,5 +370,21 @@ + + + + + + + + + + + + + + + + From c25374e0b2ff6e9f8aba429c8372a466affbcad5 Mon Sep 17 00:00:00 2001 From: jurinecko <95219754+jr-rk@users.noreply.github.com> Date: Tue, 19 Nov 2024 14:52:25 +0100 Subject: [PATCH 20/45] UFAL/Changed position of rows in submission-forms.xml following v5 (#802) * Changed position of rows in submission-forms.xml following v5 * Fixed the alignment of some text * Removed rows which are not in the v5 * Removed license selector from the `teaching` and `clariah-data` collections * Updated input differences in the submission-forms.xml following the v5. --------- Co-authored-by: Juraj Roka Co-authored-by: milanmajchrak --- dspace/config/item-submission.xml | 2 - dspace/config/submission-forms.xml | 366 ++++++++++++++--------------- 2 files changed, 176 insertions(+), 192 deletions(-) diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index 5726135a99a0..09a4fe3b4176 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -463,7 +463,6 @@ - @@ -475,7 +474,6 @@ - diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index e00cf2fb5936..f6079e4b8262 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -53,60 +53,30 @@
- - - local - contact - person - true - - - complex - This is contact person - - - - - - local - sponsor - true - - - complex - This is funding - - - dc - contributor - author + type + true - - autocomplete - Enter the author's name (Family name, Given names). + + dropdown + Select the type of content of the item. + - - - dc - title - - false - - onebox - Enter the main title of the item. - You must enter a main title for this item. - + + dc + title + + false + + onebox + Enter the main title of the item in English. + You must enter a main title for this item. + @@ -153,78 +123,119 @@ date issued false - + date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't applicable. + Please give the date when the submission data were issued if any e.g., 2014-01-21 or at least + the year. - You must enter at least the year. + You must enter the date or at least the year in a valid format. dc publisher - false + true autocomplete - Enter the name of the publisher of the previously issued instance of this item. - + Enter the name of the publisher of the previously issued instance of this item, or your home + institution. Start typing the publisher and use autocomplete form that will appear if + applicable. End your input by pressing ESC if you don't want to use the preselected value. + You must enter the name of the publisher. - dc - identifier - citation - false - - onebox - Enter the standard citation for the previously issued instance of this item. - + local + hidden + + true + + + list + Indicate whether you want to hide this item from browse and search. Combine with "Upload cmdi" + for weblicht submissions. + + + policy=deny,action=read,grantee-type=user,grantee-id=* + - dc - relation - ispartofseries + local + hasCMDI true - - Technical Report - series - Enter the series and number assigned to this item by your community. + + list + Indicate whether you will upload cmdi file in the next step. 
Combine with "hide" for weblicht + submissions. + + policy=deny,action=read,grantee-type=user,grantee-id=* + dc - identifier - - + contributor + author true - - qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - + + autocomplete + Enter the names of the authors of this item. Start typing the author's last name and use + autocomplete form that will appear if applicable. End your input by pressing ESC if you don't + want to use the preselected value.. + Please add author(s) + + + + + + local + contact + person + true + + + complex + Person to contact in case of any issues with this submission. + Please fill all the fields for the contact person. + + + + + local + sponsor + true + + + complex + Acknowledge sponsors and funding that supported work described by this submission. +
+ +
dc - type + description - true - - dropdown - Select the type of content of the item. - - + false + + textarea + Enter a description of the submitted data. + Please give us a description @@ -256,6 +267,24 @@ + + + dc + subject + + + true + + tag + Enter appropriate subject keyword or phrase and press the Add button. You can repeat it for + multiple keywords or use separators i.e., comma and semicolon, which will split it accordingly. + Start typing the keyword and use autocomplete form that will appear. End your input by pressing + ESC if you don't want to use the preselected value. + + Please enter at least one subject related to your submission + srsc + + local @@ -265,9 +294,11 @@ corpus,languageDescription,lexicalConceptualResource complex - You can state the extent of the submitted data, eg. the number of tokens. + You can state the extent of the submitted data, e.g., the number of tokens. + + metashare @@ -298,8 +329,6 @@ Media type is required - - metashare @@ -354,49 +383,20 @@ Please indicate whether the tool is language dependent - - - local - hasCMDI - true - - list - Are you going to upload cmdi file? - - - policy=deny,action=read,grantee-type=user,grantee-id=* - - - - - - local - hidden - - true - - - list - Should item be harvestable thru OAI-PMH but behave like private? - - - policy=deny,action=read,grantee-type=user,grantee-id=* - - - local bitstream redirectToURL false - + onebox - The actual maximum upload size of the file is 4GB. To upload a file bigger than the - maximum upload size, enter the URL of that large file. The admin must know the URL - of that bitstream file. Then, click on the 'Save' button, and the file will start - to upload. The file path must be an absolute path. + Please enter the full path of the file, either from the local server or an HTTP location. + The maximum upload size for a file is 4GB. To upload a file larger than this limit, provide the + URL of the file. Ensure the admin is aware of the URL of the bitstream file. 
+ Once the file path or URL is entered, click the 'Save' button to begin the upload. + Note: The file path must be an absolute path. @@ -408,62 +408,6 @@ - - - - dc - subject - - - true - - autocomplete - Enter appropriate subject keyword or phrase and press the Add button. You can repeat it for - multiple keywords or use separators i.e., Enter and comma, which will split it accordingly. - Start typing the keyword and use autocomplete form that will appear. End your input by pressing - ESC if you don't want to use the preselected value. - - Please enter at least one subject related to your submission - srsc - - - - - dc - description - abstract - false - - textarea - Enter the abstract of the item. - - - - - - dc - description - sponsorship - false - - textarea - Enter the names of any sponsors and/or funding codes in the box. - - - - - - dc - description - - false - - textarea - Enter any other description or comments in this box. - - - - Indicate whether you will upload cmdi file in the next step. Combine with "hide" for weblicht @@ -1721,7 +1665,7 @@ autocomplete This concerns the digital object (not the analog - original). An institution from which the data come. Used e.g. to give proper attribution. Generally + original). An institution from which the data come. Used e.g., to give proper attribution. Generally different from publisher. @@ -1857,6 +1801,27 @@ You can state the extent of the submitted data, eg. the number of tokens.
+ + + local + bitstream + redirectToURL + false + + onebox + + Please enter the full path of the file, either from the local server or an HTTP location. + The maximum upload size for a file is 4GB. To upload a file larger than this limit, provide the + URL of the file. Ensure the admin is aware of the URL of the bitstream file. + Once the file path or URL is entered, click the 'Save' button to begin the upload. + Note: The file path must be an absolute path. + + + + policy=deny,action=read,grantee-type=user,grantee-id=* + + +
@@ -1925,7 +1890,7 @@ hidden false - + list Indicate whether you want to hide this item from browse and search. Combine with "Upload cmdi" for weblicht submissions. @@ -1942,7 +1907,7 @@ hasMetadata false - + list Indicate whether you will upload cmdi file in the next step. Combine with "hide" for weblicht @@ -2092,6 +2057,27 @@ + + + local + bitstream + redirectToURL + false + + onebox + + Please enter the full path of the file, either from the local server or an HTTP location. + The maximum upload size for a file is 4GB. To upload a file larger than this limit, provide the + URL of the file. Ensure the admin is aware of the URL of the bitstream file. + Once the file path or URL is entered, click the 'Save' button to begin the upload. + Note: The file path must be an absolute path. + + + + policy=deny,action=read,grantee-type=user,grantee-id=* + + +
@@ -2129,7 +2115,7 @@ relation isreplacedby true - + autocomplete A related resource that supplants, displaces, or supersedes the described resource. @@ -3204,7 +3190,7 @@ - Hidden + Yes hidden @@ -3222,15 +3208,15 @@ Uncomment the example row of the complex input type definition to see this input in the submission UI. --> - - - - + + + + - + @@ -3247,4 +3233,4 @@ required="true"/> - + \ No newline at end of file From 37cf9047c63eb00903e225c7fae146be4ad07cdc Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Thu, 21 Nov 2024 15:37:22 +0100 Subject: [PATCH 21/45] Show db connection statistics in the log file or the `dbstatistics` endpoint (#815) * Show db statistics in the log file or the `dbstatistics` endpoint * Finding out why github checks are failed - undo hibernate.cfg * Disabled automatic logging * Use scheduled CRON job instead of PostConstruct * hibernate generating property true --------- Co-authored-by: Paurikova2 --- .../main/java/org/dspace/core/Context.java | 12 ++++ .../dspace/core/HibernateDBConnection.java | 62 +++++++++++++++++++ .../ClarinAutoRegistrationController.java | 2 +- .../DBConnectionStatisticsController.java | 39 ++++++++++++ dspace/config/hibernate.cfg.xml | 1 + 5 files changed, 115 insertions(+), 1 deletion(-) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/DBConnectionStatisticsController.java diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index 02a3fee09f8a..8eed24348c39 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -976,4 +976,16 @@ public Group getAdminGroup() throws SQLException { .getGroupService() .findByName(this, Group.ADMIN) : adminGroup; } + + /** + * Get the Hibernate statistics for this context. + * Only available when using HibernateDBConnection. 
+ * @return the Hibernate statistics as a String + */ + public String getHibernateStatistics() { + if (dbConnection instanceof HibernateDBConnection) { + return ((HibernateDBConnection) dbConnection).getHibernateStatistics(); + } + return "Hibernate statistics are not available for this database connection"; + } } diff --git a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java index b371af80eede..f8c620380d5f 100644 --- a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java +++ b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java @@ -12,6 +12,8 @@ import java.sql.SQLException; import javax.sql.DataSource; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.ResourcePolicy; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; @@ -29,9 +31,11 @@ import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.proxy.HibernateProxyHelper; import org.hibernate.resource.transaction.spi.TransactionStatus; +import org.hibernate.stat.Statistics; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.orm.hibernate5.SessionFactoryUtils; +import org.springframework.scheduling.annotation.Scheduled; /** * Hibernate implementation of the DBConnection. @@ -64,6 +68,8 @@ public class HibernateDBConnection implements DBConnection { private boolean batchModeEnabled = false; private boolean readOnlyEnabled = false; + private static final Logger log = LogManager.getLogger(HibernateDBConnection.class); + /** * Retrieves the current Session from Hibernate (per our settings, Hibernate is configured to create one Session * per thread). If Session doesn't yet exist, it is created. 
A Transaction is also initialized (or reinintialized) @@ -102,6 +108,13 @@ protected Transaction getTransaction() { return sessionFactory.getCurrentSession().getTransaction(); } + // This method will run every 10 seconds + @Scheduled(fixedRate = 10000) // Fixed rate in milliseconds + public void logConnectionMetrics() { + logHibernateStatistics(); + logDatabaseMetaData(); + } + /** * Check if Hibernate Session is still "alive" / open. An open Session may or may not have an open Transaction * (so isTransactionAlive() may return false even if isSessionAlive() returns true). A Session may be reused for @@ -350,4 +363,53 @@ public void flushSession() throws SQLException { getSession().flush(); } } + + + /** + * Log the Hibernate statistics (e.g. open sessions, closed sessions, transactions, connections obtained) + */ + private void logHibernateStatistics() { + if (sessionFactory != null) { + log.info(getHibernateStatistics()); + } else { + log.warn(getHibernateStatistics()); + } + } + + /** + * Log the database metadata (URL, User, Driver, Product, Version) + */ + private void logDatabaseMetaData() { + try (Session session = sessionFactory.openSession()) { + // Use doReturningWork to safely interact with the JDBC Connection + session.doReturningWork(connection -> { + try { + DatabaseMetaData metaData = connection.getMetaData(); + log.info("Database Metadata - URL: {}, User: {}, Driver: {}, Product: {} {}" + , metaData.getURL(), metaData.getUserName(), metaData.getDriverName(), + metaData.getDatabaseProductName(), metaData.getDatabaseProductVersion()); + } catch (SQLException e) { + log.warn("Failed to retrieve database metadata: {}", e.getMessage()); + } + return null; // Returning null as no specific result is needed + }); + } catch (Exception e) { + log.warn("Failed to log database metadata: {}", e.getMessage()); + } + } + + /** + * Get Hibernate statistics as a string + */ + public String getHibernateStatistics() { + if (sessionFactory != null) { + Statistics stats 
= sessionFactory.getStatistics(); + return "Hibernate Statistics - Open Sessions: " + stats.getSessionOpenCount() + ", Closed Sessions: " + + stats.getSessionCloseCount() + ", Transactions: " + stats.getTransactionCount() + + ", Connections Obtained: " + stats.getConnectCount(); + } else { + return "SessionFactory is not available for logging Hibernate statistics."; + } + } } + diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java index af6c01714fa7..eef3b8495699 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java @@ -41,7 +41,7 @@ * * This Shibboleth Authentication process is tested in ClarinShibbolethLoginFilterIT. * - * @author Milan Majchrak (milan.majchrak at dataquest.sk) + * @author Milan Majchrak (dspace at dataquest.sk) */ @RequestMapping(value = "/api/autoregistration") @RestController diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/DBConnectionStatisticsController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/DBConnectionStatisticsController.java new file mode 100644 index 000000000000..9472a3e12f5d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/DBConnectionStatisticsController.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.core.Context; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import 
org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * Controller for retrieving database connection statistics + * + * @author Milan Majchrak (dspace at dataquest.sk) + */ +@PreAuthorize("hasAuthority('ADMIN')") +@RequestMapping(value = "/api/dbstatistics") +@RestController +public class DBConnectionStatisticsController { + @RequestMapping(method = RequestMethod.GET) + public ResponseEntity getStatistics(HttpServletRequest request) { + + Context context = ContextUtil.obtainContext(request); + if (context == null) { + return ResponseEntity.status(500).build(); + } + // Return response entity with the statistics + return ResponseEntity.ok().body(context.getHibernateStatistics()); + } +} diff --git a/dspace/config/hibernate.cfg.xml b/dspace/config/hibernate.cfg.xml index 82e4fd738038..5a7de653a8cb 100644 --- a/dspace/config/hibernate.cfg.xml +++ b/dspace/config/hibernate.cfg.xml @@ -28,6 +28,7 @@ org.ehcache.jsr107.EhcacheCachingProvider + true From e29101b7da95c98455895e0090d348870f2bb352 Mon Sep 17 00:00:00 2001 From: jurinecko <95219754+jr-rk@users.noreply.github.com> Date: Tue, 26 Nov 2024 17:10:32 +0100 Subject: [PATCH 22/45] Translation of submission-forms to _cs (#816) * Translation of submission-forms to _cs * Translated bitstream metadata and complex input fields * Translated the rest of submission-froms_cs.xml * Fixed regex... it must contain regex value, not the message. 
--------- Co-authored-by: Juraj Roka Co-authored-by: milanmajchrak --- dspace/config/submission-forms.xml | 35 +- dspace/config/submission-forms_cs.xml | 779 ++++++++++++-------------- 2 files changed, 389 insertions(+), 425 deletions(-) diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index f6079e4b8262..b1f0a6b032f6 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -59,11 +59,14 @@ type true - + dropdown - Select the type of content of the item. + Type of the resource: "Corpus" refers to text, speech and multimodal corpora. + "Lexical Conceptual Resource" includes lexica, ontologies, dictionaries, word lists etc. + "language Description" covers language models and grammars. + "Technology / Tool / Service" is used for tools, systems, system components etc. - + Please select a resource type for your submission. @@ -142,7 +145,8 @@ autocomplete Enter the name of the publisher of the previously issued instance of this item, or your home institution. Start typing the publisher and use autocomplete form that will appear if - applicable. End your input by pressing ESC if you don't want to use the preselected value. + applicable. End your input by pressing ESC if you don't want to use the preselected value. + You must enter the name of the publisher. @@ -156,7 +160,8 @@ list Indicate whether you want to hide this item from browse and search. Combine with "Upload cmdi" - for weblicht submissions. + for weblicht submissions. + policy=deny,action=read,grantee-type=user,grantee-id=* @@ -171,7 +176,8 @@ list Indicate whether you will upload cmdi file in the next step. Combine with "hide" for weblicht - submissions. + submissions. + policy=deny,action=read,grantee-type=user,grantee-id=* @@ -188,7 +194,8 @@ autocomplete Enter the names of the authors of this item. Start typing the author's last name and use autocomplete form that will appear if applicable. 
End your input by pressing ESC if you don't - want to use the preselected value.. + want to use the preselected value. + Please add author(s) - - - local - contact - person - true - - - complex - This is contact person - - - - - - local - sponsor - true - - - complex - This is funding - - - dc - contributor - author + type + true - - autocomplete - Enter the author's name (Family name, Given names). - - + + dropdown + "Corpus" označuje textové, řečové i multimodální korpusy. + "Lexical Conceptual Resource" zahrnuje lexikony, ontologie, slovníky, seznamy slov apod. + "Language Description" zahrnuje jazykové modely a gramatiky. + "Technology / Tool / Service" se používá pro nástroje, systémy, systémové komponenty atd. + + Prosím zvolte typ dat pro váš příspěvek. @@ -140,11 +113,11 @@ relation isreferencedby true - + onebox - If the item has any alternative titles, please enter them here. - - http.* + Odkaz na původní článek, který zmiňuje tento záznam. + + http.* @@ -153,78 +126,111 @@ date issued false - + date - Please give the date of previous publication or public distribution. - You can leave out the day and/or month if they aren't applicable. - - You must enter at least the year. + Uveďte prosím datum vydání příspěvku, např 2014-01-21 nebo alespoň rok. + Musíte uvést datum v platném formátu. dc publisher - false - + true + autocomplete - Enter the name of the publisher of the previously issued instance of this item. - + Uveďte vydavatele předchozího vydání, nebo vaši domovskou instituci. Začnete-li vyplňovat vydavatele, objeví se nápověda. Nechcete-li nápovědu využít, stiskněte ESC. + Musíte uvést vydavatele. - dc - identifier - citation - false - - onebox - Enter the standard citation for the previously issued instance of this item. - + local + hidden + + true + + + list + Uveďte, má-li být záznam skryt ve vyhledávání a procházení. Pro příspěvky pro weblicht kombinujte s "Nahrát cmdi". 
+ + + policy=deny,action=read,grantee-type=user,grantee-id=* + - dc - relation - ispartofseries + local + hasCMDI true - - Technical Report - series - Enter the series and number assigned to this item by your community. + + list + Uveďte, jestli se chystáte v dalším kroku nahrát cmdi soubor. Kombinujte se schováváním záznamů pro weblicht příspěvky. + + policy=deny,action=read,grantee-type=user,grantee-id=* + dc - identifier - - + contributor + author true - - qualdrop_value - If the item has any identification numbers or codes associated with - it, please enter the types and the actual numbers or codes. - + + autocomplete + Uveďte jména autorů tohoto záznamu. Začnete-li vyplňovat příjmení, objeví se nápověda. Nechcete-li nápovědu využít, stiskněte ESC. + Uveďte prosím autora(y) + + + + + + local + contact + person + true + + + complex + Osoba, která bude kontaktována v případě problémů s tímto záznamem. + Vyplňte prosím všechna pole u kontaktní osoby + + + + + local + sponsor + true + + + complex + Uveďte sponzory a zdroje financí podporující vznik práce popsané v tomto příspěvku. + + +
dc - type + description - true - - dropdown - Select the type of content of the item. - - + false + + textarea + Popište nahrávaná data. + Uveďte prosím popis. @@ -233,14 +239,11 @@ language iso false - + corpus,lexicalConceptualResource,languageDescription autocomplete - Select the language of the main content of the item. If the language does not appear in the - list, please select 'Other'. If the content does not really have a language (for example, if it - is a dataset or an image) please select 'N/A'. - - Please choose a language for the resource. + Vyberte jazyky, jichž se data tohoto záznamu týkají. Je možné zvolit více jazyků. Začnete-li psát, objeví se nápověda. Je lepší vyjmenovat všechny dotčené jazyky (pokud jich je větší množství, kontaktujte podporu), než používat iso kód 'mul'. + Prosím zvolte jazyk. @@ -249,38 +252,54 @@ language iso true - + toolService autocomplete - If the tool/service is language dependent, select the appropriate language(s). Otherwise leave the field empty. Multiple languages are possible. Start typing the language and use autocomplete form that will appear. + Pokud je nástroj/služba jazykově závislá, uveďte potřebné jazyky. Jinak můžete nechat nevyplněné. Je možné zvolit více jazyků. Začnete-li psát, objeví se nápověda. + + + dc + subject + + + true + + tag + Uveďte vhodná klíčová slova, nebo fráze a zmáčkněte tlačítko přidat. + Klíčová slova buď přidávejte po jednom, nebo je oddělte čárkou, nebo středníkem. Začnete-li psát, objeví se nápověda. + + Uveďte alespoň jedno klíčové slovo. + srsc + + local size info true - + corpus,languageDescription,lexicalConceptualResource complex - You can state the extent of the submitted data, eg. the number of tokens. + Můžete uvést rozsah nahraných dat, například počet tokenů. + + metashare ResourceInfo#ContentInfo mediaType false - + corpus,lexicalConceptualResource dropdown - Media type of the main content of the item e.g., "text" for - textual corpora or "audio" for audio recordings. 
- - Media type is required + Zvolte druh média tohoto záznamu, např. "text" pro textový korpus, "audio" pro audio nahrávky. + Uveďte typ média @@ -289,28 +308,24 @@ ResourceInfo#ContentInfo mediaType false - + languageDescription dropdown - Media type of the main content of the item e.g., "text" for - textual corpora or "audio" for audio recordings. - - Media type is required + Zvolte druh média tohoto záznamu, např. "text" pro textový korpus, "audio" pro audio nahrávky. + Uveďte typ média - - metashare ResourceInfo#ContentInfo detailedType false - + toolService dropdown - Choose one of the types + Zvolte jeden z podtypů @@ -319,11 +334,11 @@ ResourceInfo#ContentInfo detailedType false - + languageDescription dropdown - Choose one of the types + Zvolte jeden z podtypů @@ -332,11 +347,11 @@ ResourceInfo#ContentInfo detailedType false - + lexicalConceptualResource dropdown - Choose one of the types + Zvolte jeden z podtypů @@ -345,43 +360,11 @@ ResourceInfo#ResourceComponentType#ToolServiceInfo languageDependent false - + toolService list - Indicate whether the operation of the tool or service is - language dependent or not - - Please indicate whether the tool is language dependent - - - - - local - hasCMDI - true - - list - Are you going to upload cmdi file? - - - policy=deny,action=read,grantee-type=user,grantee-id=* - - - - - - local - hidden - - true - - - list - Should item be harvestable thru OAI-PMH but behave like private? - - - policy=deny,action=read,grantee-type=user,grantee-id=* - + Uveďte zda funkce nástroje či služby závisí na konkrétním jazyku. + Uveďte prosím zda je nástroj jazykově závislý. @@ -390,13 +373,14 @@ bitstream redirectToURL false - + onebox - The actual maximum upload size of the file is 4GB. To upload a file bigger than the - maximum upload size, enter the URL of that large file. The admin must know the URL - of that bitstream file. Then, click on the 'Save' button, and the file will start - to upload. 
The file path must be an absolute path. + Prosím, zadejte úplnou cestu k souboru, ať už z místního serveru, nebo z webové lokace (HTTP). + Maximální velikost nahrávaného souboru je 4 GB. Pokud chcete nahrát soubor větší než tento limit, zadejte jeho URL. + Ujistěte se, že administrátor je obeznámen s URL souboru datového proudu (bitstream). + Jakmile zadáte cestu k souboru nebo URL, klikněte na tlačítko „Uložit“ pro zahájení nahrávání. + Poznámka: Cesta k souboru musí být absolutní. @@ -405,65 +389,8 @@
- -
- - - dc - subject - - - true - - autocomplete - Enter appropriate subject keyword or phrase and press the Add button. You can repeat it for - multiple keywords or use separators i.e., Enter and comma, which will split it accordingly. - Start typing the keyword and use autocomplete form that will appear. End your input by pressing - ESC if you don't want to use the preselected value. - - Please enter at least one subject related to your submission - srsc - - - - - dc - description - abstract - false - - textarea - Enter the abstract of the item. - - - - - - dc - description - sponsorship - false - - textarea - Enter the names of any sponsors and/or funding codes in the box. - - - - - - dc - description - - false - - textarea - Enter any other description or comments in this box. - - - -
- - - - - - - - - + + + + + + + + + + person @@ -711,36 +638,36 @@ Enter the id of the project - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + isProjectOfPerson @@ -1582,12 +1509,12 @@ type false - + dropdown - Choose one of TEXT, VIDEO, SOUND, IMAGE, 3D. If choosing - TEXT consider adding the resource among other Language Resources. Images are visual resources for users to - look at. Text materials are meant to be read and not looked at. - Please select one of the options. + Vyberte z TEXT, VIDEO, ZVUK, OBRAZ, 3D. Pokud zvolíte TEXT, + zvažte, jestli se nejedná o Language Resource (jazykový zdroj). Obrazy jsou vizuální materiály, na které se + uživatelé mohou dívat. Textové materiály jsou určeny ke čtení a nikoli k dívání. + Zvolte prosím jednu z možností @@ -1596,10 +1523,10 @@ title false - + onebox - Enter the main title of the item in English. - You must enter a main title for this item. + Uveďte anglický název tohoto příspěvku. + Musíte uvést název. @@ -1608,11 +1535,9 @@ demo uri false - + onebox - A url with samples of the resource or, in the case of tools, - of samples of the output. - + URL se vzorky dat, v případě nástrojů předvedení výstupu. http.* @@ -1623,9 +1548,9 @@ relation isreferencedby true - + onebox - Link to original paper that references this dataset. + Odkaz na původní článek, který zmiňuje tento záznam. http.* @@ -1636,12 +1561,10 @@ date issued false - + date - Please give the date when the submission data were issued if any e.g., 2014-01-21 or at least - the year. - - You must enter the date or at least the year in a valid format. + Uveďte prosím datum vydání příspěvku, např 2014-01-21 nebo alespoň rok. + Musíte uvést datum v platném formátu. @@ -1650,11 +1573,9 @@ hidden false - + list - Indicate whether you want to hide this item from browse and search. Combine with "Upload cmdi" - for weblicht submissions. 
- + Uveďte, má-li být záznam skryt ve vyhledávání a procházení. Pro příspěvky pro weblicht kombinujte s "Nahrát cmdi". policy=deny,action=read,grantee-type=user,grantee-id=* @@ -1667,12 +1588,10 @@ hasMetadata false - + list - Indicate whether you will upload cmdi file in the next step. Combine with "hide" for weblicht - submissions. - + Uveďte, jestli se chystáte v dalším kroku nahrát cmdi soubor. Kombinujte se schováváním záznamů pro weblicht příspěvky. policy=deny,action=read,grantee-type=user,grantee-id=* @@ -1687,13 +1606,10 @@ contributor author true - + clarin-name - Enter the names of the authors of this item. Start typing the author's last name and use - autocomplete form that will appear if applicable. End your input by pressing ESC if you don't - want to use the preselected value. - - Please add author(s) + Uveďte jména autorů tohoto záznamu. Začnete-li vyplňovat příjmení, objeví se nápověda. Nechcete-li nápovědu využít, stiskněte ESC. + Uveďte prosím autora(y) @@ -1702,14 +1618,12 @@ publisher true - + autocomplete - The name of the publisher of the original analog or born - digital object. Use your home institution if this is a born digital object being published now. Start typing the - publisher and use autocomplete form that will appear if applicable. End your input by pressing ESC if you - don't want to use the preselected value. - - You must enter the name of the publisher. + Uveďte vydavatele analogového originálu, případně + vydavatele "born digital" originálu. Pokud se jedná o právě zrozený zdroj, uveďte vaši domovskou instituci. Začnete-li + vyplňovat vydavatele, objeví se nápověda. Nechcete-li nápovědu využít, stiskněte ESC. + Musíte uvést vydavatele. @@ -1718,11 +1632,11 @@ dataProvider false - + autocomplete - This concerns the digital object (not the analog - original). An institution from which the data come. Used e.g. to give proper attribution. Generally - different from publisher. 
+ Týká se digitálního objektu, nikoliv analogového + originálu. Instituce, od které data pocházejí. Např. pro uvedení původu u některých licencí. Obecně se bude + lišit od vydavatele. @@ -1732,10 +1646,10 @@ contact person true - + complex - Person to contact in case of any issues with this submission. - Please fill all the fields for the contact person. + Osoba, která bude kontaktována v případě problémů s tímto záznamem. + Vyplňte prosím všechna pole u kontaktní osoby @@ -1743,9 +1657,9 @@ local sponsor true - + complex - Acknowledge sponsors and funding that supported work described by this submission. + Uveďte sponzory a zdroje financí podporující vznik práce popsané v tomto příspěvku. @@ -1756,11 +1670,11 @@ type false - + autocomplete - The type should be different from what you have - entered in the first step. Examples: photo or painting for IMAGE, book or letter for TEXT, etc. - Type is required + Typ by měl být odlišný od druhu, který jste zvolili v + prvním kroku. Například: fotografie nebo malba pro druh OBRAZ, kniha nebo dopis pro typ TEXT apod. + Vyplňte typ @@ -1770,25 +1684,30 @@ description false - + textarea - Enter a description of the submitted data. - Please give us a description + Popište nahrávaná data. + Uveďte prosím popis. + dc language iso true - + TEXT autocomplete - Select the language of the main content of the item. Multiple languages are possible. Start - typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). + + Vyberte jazyky, jichž se data tohoto záznamu týkají. Je možné zvolit více jazyků. Začnete-li psát, + objeví se nápověda. Je lepší vyjmenovat všechny dotčené jazyky (pokud jich je větší množství, kontaktujte podporu), + než používat iso kód 'mul'. 
- The language is required for TEXT resources + Pro TEXTy je jazyk povinný @@ -1797,12 +1716,13 @@ language iso true - + VIDEO,IMAGE,SOUND,3D autocomplete - Optionally, select the language of the main content - of the item. Multiple languages are possible. Start - typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). + + Volitelné. Vyberte jazyky, jichž se data tohoto záznamu týkají. Je možné zvolit více jazyků. Začnete-li psát, + objeví se nápověda. Je lepší vyjmenovat všechny dotčené jazyky (pokud jich je větší množství, kontaktujte podporu), + než používat iso kód 'mul'. @@ -1814,17 +1734,16 @@ true - + autocomplete - Enter appropriate subject keyword or phrase and press - the Add button. Use keywords to specify also people, places and times (period, era, date range etc) the resource - is about. You can use hierarchical subjects, separate the hierarchy levels with two colons (::). Eg. - People::John Doe, Places::New York, Times::WWII. - You can repeat it for multiple keywords or use separators i.e., comma and semicolon, which will split it accordingly. - Start typing the keyword and use autocomplete form that will appear. End your input by pressing - ESC if you don't want to use the preselected value. + Uveďte vhodná klíčová slova, nebo fráze a zmáčkněte + tlačítko přidat. + Klíčová slova využijte také pro lidi, místa a časy (období, éry, rozsah dat apod.) o kterých záznam je. + Možnost využítvat hierarchické předměty. Oddělte jednotlivé úrovně hierarchie dvěma dvojtečkami (::). + Např. People::Jára Cimrman, Places::Liptákov, Times::počátek 20. století. + Klíčová slova buď přidávejte po jednom, nebo je oddělte čárkou, nebo středníkem. Začnete-li psát, objeví se nápověda. - Please enter at least one subject related to your submission + Uveďte alespoň jedno klíčové slovo. 
@@ -1833,12 +1752,12 @@ identifier other true - + onebox - The item will get a handle. If the item has any - identification numbers or codes associated with it, please enter the types and the actual numbers or codes. + Pro tento záznam bude vytvořen handle. Pokud má + zdroj přidělený jiný identifikátor, nebo kód, uveďte jej i jeho typ. @@ -1849,9 +1768,30 @@ size info true - + complex - You can state the extent of the submitted data, eg. the number of tokens. + Můžete uvést rozsah nahraných dat, například počet tokenů. + + + + + local + bitstream + redirectToURL + false + + onebox + + Prosím, zadejte úplnou cestu k souboru, ať už z místního serveru, nebo z webové lokace (HTTP). + Maximální velikost nahrávaného souboru je 4 GB. Pokud chcete nahrát soubor větší než tento limit, zadejte jeho URL. + Ujistěte se, že administrátor je obeznámen s URL souboru datového proudu (bitstream). + Jakmile zadáte cestu k souboru nebo URL, klikněte na tlačítko „Uložit“ pro zahájení nahrávání. + Poznámka: Cesta k souboru musí být absolutní. + + + + policy=deny,action=read,grantee-type=user,grantee-id=* + @@ -1870,10 +1810,10 @@ title false - + onebox - Enter the main title of the item in English. - You must enter a main title for this item. + Uveďte anglický název tohoto příspěvku. + Musíte uvést název. @@ -1882,9 +1822,9 @@ demo uri false - + onebox - Course homepage + Stránky kurzu http.* @@ -1895,9 +1835,9 @@ relation isreferencedby true - + onebox - Link to original paper that references this dataset. + Odkaz na původní článek, který zmiňuje tento záznam. http.* @@ -1908,12 +1848,10 @@ date issued false - + date - Please give the date when the submission data were issued if any e.g., 2014-01-21 or at least - the year. - - You must enter the date or at least the year in a valid format. + Uveďte prosím datum vydání příspěvku, např 2014-01-21 nebo alespoň rok. + Musíte uvést datum v platném formátu. 
@@ -1922,11 +1860,9 @@ hidden false - + list - Indicate whether you want to hide this item from browse and search. Combine with "Upload cmdi" - for weblicht submissions. - + Uveďte, má-li být záznam skryt ve vyhledávání a procházení. Pro příspěvky pro weblicht kombinujte s "Nahrát cmdi". policy=deny,action=read,grantee-type=user,grantee-id=* @@ -1939,12 +1875,10 @@ hasMetadata false - + list - Indicate whether you will upload cmdi file in the next step. Combine with "hide" for weblicht - submissions. - + Uveďte, jestli se chystáte v dalším kroku nahrát cmdi soubor. Kombinujte se schováváním záznamů pro weblicht příspěvky. policy=deny,action=read,grantee-type=user,grantee-id=* @@ -1954,25 +1888,22 @@
- + dc contributor author true - + clarin-name - Enter the names of the authors of this item. Start typing the author's last name and use - autocomplete form that will appear if applicable. End your input by pressing ESC if you don't - want to use the preselected value. - - Please add author(s) + Uveďte jména autorů tohoto záznamu. Začnete-li vyplňovat příjmení, objeví se nápověda. Nechcete-li nápovědu využít, stiskněte ESC. + Uveďte prosím autora(y) @@ -1981,14 +1912,12 @@ publisher true - + autocomplete - The name of the publisher of the original analog or born - digital object. Use your home institution if this is a born digital object being published now. Start typing the - publisher and use autocomplete form that will appear if applicable. End your input by pressing ESC if you - don't want to use the preselected value. - - You must enter the name of the publisher. + Uveďte vydavatele analogového originálu, případně + vydavatele "born digital" originálu. Pokud se jedná o právě zrozený zdroj, uveďte vaši domovskou instituci. Začnete-li + vyplňovat vydavatele, objeví se nápověda. Nechcete-li nápovědu využít, stiskněte ESC. + Musíte uvést vydavatele. @@ -1997,10 +1926,10 @@ contact person true - + complex - Person to contact in case of any issues with this submission. - Please fill all the fields for the contact person. + Osoba, která bude kontaktována v případě problémů s tímto záznamem. + Vyplňte prosím všechna pole u kontaktní osoby @@ -2008,9 +1937,9 @@ local sponsor true - + complex - Acknowledge sponsors and funding that supported work described by this submission. + Uveďte sponzory a zdroje financí podporující vznik práce popsané v tomto příspěvku.
@@ -2022,12 +1951,12 @@ type false - + dropdown teachingMaterials - This is here to autofill a value. The value should not be changed. - Please select a resource type for your submission. + Toto pole je zde pro automatické vyplnění hodnoty. Hodnota by neměla být měněna. + Prosím zvolte typ dat pro váš příspěvek.
@@ -2036,10 +1965,10 @@ description false - + textarea - Enter a description of the submitted data. - Please give us a description + Popište nahrávaná data. + Uveďte prosím popis. @@ -2048,12 +1977,13 @@ language iso true - + autocomplete - Select the language of the main content of the item. Multiple languages are possible. Start - typing the language and use autocomplete form that will appear if applicable. Better to list all the languages then to use the 'mul' iso code (if there are too many, contact support). + + Vyberte jazyky, jichž se data tohoto záznamu týkají. Je možné zvolit více jazyků. Začnete-li psát, objeví se nápověda. + Je lepší vyjmenovat všechny dotčené jazyky (pokud jich je větší množství, kontaktujte podporu), než používat iso kód 'mul'. - Please choose a language for the resource. + Prosím zvolte jazyk. @@ -2063,14 +1993,13 @@ true - + autocomplete - Enter appropriate subject keyword or phrase and press the Add button. You can repeat it for - multiple keywords or use separators i.e., comma and semicolon, which will split it accordingly. - Start typing the keyword and use autocomplete form that will appear. End your input by pressing - ESC if you don't want to use the preselected value. + + Uveďte vhodná klíčová slova, nebo fráze a zmáčkněte tlačítko přidat. + Klíčová slova buď přidávejte po jednom, nebo je oddělte čárkou, nebo středníkem. Začnete-li psát, objeví se nápověda. - Please enter at least one subject related to your submission + Uveďte alespoň jedno klíčové slovo. @@ -2079,19 +2008,41 @@ identifier other true - + onebox - The item will get a handle. If the item has any - identification numbers or codes associated with it, please enter the types and the actual numbers or codes. + + Pro tento záznam bude vytvořen handle. Pokud má + zdroj přidělený jiný identifikátor, nebo kód, uveďte jej i jeho typ. 
+ + + local + bitstream + redirectToURL + false + + onebox + + Prosím, zadejte úplnou cestu k souboru, ať už z místního serveru, nebo z webové lokace (HTTP). + Maximální velikost nahrávaného souboru je 4 GB. Pokud chcete nahrát soubor větší než tento limit, zadejte jeho URL. + Ujistěte se, že administrátor je obeznámen s URL souboru datového proudu (bitstream). + Jakmile zadáte cestu k souboru nebo URL, klikněte na tlačítko „Uložit“ pro zahájení nahrávání. + Poznámka: Cesta k souboru musí být absolutní. + + + + policy=deny,action=read,grantee-type=user,grantee-id=* + + + -
+ local @@ -2114,7 +2065,10 @@ true autocomplete - URL příbuzného záznamu, který je tímto záznamem nahrazen. Pokud je příbuzný záznam v tomto repozitáři, začněte psát jeho název, nebo handle a vyberte záznam z nabídky. + + URL příbuzného záznamu, který je tímto záznamem nahrazen. + Pokud je příbuzný záznam v tomto repozitáři, začněte psát jeho název, nebo handle a vyberte záznam z nabídky. + @@ -2126,7 +2080,7 @@ relation isreplacedby true - + autocomplete Příbuzný záznam, který nahrazuje tento. @@ -2156,7 +2110,7 @@ - Yes + Ano true @@ -2750,11 +2704,11 @@ VIDEO - SOUND + ZVUK SOUND - IMAGE + OBRAZ IMAGE @@ -3191,17 +3145,17 @@ - Yes + Ano true - No + Ne false - Hidden + Ano hidden @@ -3219,29 +3173,28 @@ Uncomment the example row of the complex input type definition to see this input in the submission UI. --> - - - - + + + + - - - - - + + + + + - + - + - + \ No newline at end of file From dadd1fccb953581c93f9d1974e6d6afdbc4e297c Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Wed, 27 Nov 2024 08:08:12 +0100 Subject: [PATCH 23/45] Updated cfg to pretify the author suggestions (#819) --- dspace/config/clarin-dspace.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/dspace/config/clarin-dspace.cfg b/dspace/config/clarin-dspace.cfg index 708e5c9b1c9b..ebb3fa782c51 100644 --- a/dspace/config/clarin-dspace.cfg +++ b/dspace/config/clarin-dspace.cfg @@ -298,4 +298,5 @@ autocomplete.custom.separator.solr-subject_ac = \\|\\|\\| autocomplete.custom.separator.solr-publisher_ac = \\|\\|\\| autocomplete.custom.separator.solr-dataProvider_ac = \\|\\|\\| autocomplete.custom.separator.solr-dctype_ac = \\|\\|\\| +autocomplete.custom.separator.solr-author_ac = \\|\\|\\| autocomplete.custom.allowed = solr-author_ac,solr-publisher_ac,solr-dataProvider_ac,solr-dctype_ac,solr-subject_ac,solr-handle_title_ac,json_static-iso_langs.json From 0a0466e397b675d0ca054755411cce609f6411d4 Mon Sep 17 00:00:00 2001 From: Paurikova2 
<107862249+Paurikova2@users.noreply.github.com> Date: Wed, 27 Nov 2024 13:08:43 +0100 Subject: [PATCH 24/45] crosswalk-embargo (#821) * added fn for embargo * using of res policy end_date and added comments * fix string format problem with %s * integration tests are falling down * checkstyle violations * removed findHandle duplicity * added deleted line * checkstyle violations --- .../VersionedHandleIdentifierProviderIT.java | 3 - .../org/dspace/utils/SpecialItemService.java | 157 ++++++++++++++++++ .../resources/DSpaceResourceResolver.java | 3 +- .../resources/functions/GetAvailableFn.java | 31 ++++ .../functions/StringXSLFunction.java | 1 - .../oai/metadataFormats/datacite_openaire.xsl | 17 +- 6 files changed, 194 insertions(+), 18 deletions(-) create mode 100644 dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/functions/GetAvailableFn.java diff --git a/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java index 57acf1f1c453..a28a5a4c7508 100644 --- a/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java +++ b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java @@ -62,9 +62,6 @@ public void setUp() throws Exception { @Override public void destroy() throws Exception { super.destroy(); - // After this test has finished running, refresh application context and - // set the expected 'default' versioned handle provider back to ensure other tests don't fail - DSpaceServicesFactory.getInstance().getServiceManager().getApplicationContext().refresh(); } private void registerProvider(Class type) { diff --git a/dspace-oai/src/main/java/org/dspace/utils/SpecialItemService.java b/dspace-oai/src/main/java/org/dspace/utils/SpecialItemService.java index d62cbc481fcb..c801ccc156a6 100644 --- a/dspace-oai/src/main/java/org/dspace/utils/SpecialItemService.java +++ 
b/dspace-oai/src/main/java/org/dspace/utils/SpecialItemService.java @@ -11,6 +11,10 @@ import java.io.InputStreamReader; import java.io.Reader; +import java.sql.SQLException; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; import java.util.List; import java.util.Objects; import javax.xml.parsers.DocumentBuilder; @@ -18,17 +22,27 @@ import javax.xml.parsers.ParserConfigurationException; import org.dspace.app.util.DCInput; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; +import org.dspace.content.MetadataField; import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ClarinServiceFactory; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.ItemService; +import org.dspace.content.service.MetadataFieldService; +import org.dspace.content.service.clarin.ClarinItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; +import org.dspace.xoai.exceptions.InvalidMetadataFieldException; +import org.dspace.xoai.services.impl.DSpaceFieldResolver; import org.springframework.stereotype.Component; import org.w3c.dom.Document; import org.w3c.dom.Element; @@ -267,6 +281,149 @@ public static Node getAuthor(String mdValue) { } } + /** + * Retrieves the earliest available date for an item identified by the given identifier URI. + * This method checks for any embargo date first and then retrieves the "dc.date.available" + * metadata value as a fallback if no embargo date is found. 
+ * + * @param identifierUri The identifier URI of the item whose available date is to be retrieved. + * @return A string representation of the earliest available date, or null if no date is found or an error occurs. + */ + public static String getAvailable(String identifierUri) { + Context context = new Context(); + // Find the metadata field for "dc.identifier.uri" + String mtdField = "dc.identifier.uri"; + MetadataField metadataField = findMetadataField(context, mtdField); + if (Objects.isNull(metadataField)) { + log.error(String.format("Metadata field for %s not found.", mtdField)); + return null; + } + + // Retrieve the item using the handle + ClarinItemService clarinItemService = ClarinServiceFactory.getInstance().getClarinItemService(); + Item item; + try { + List itemList = clarinItemService.findByHandle(context, metadataField, identifierUri); + item = itemList.isEmpty() ? null : itemList.get(0); + } catch (SQLException e) { + log.error("Error retrieving item by handle.", e); + return null; + } + if (Objects.isNull(item)) { + log.error(String.format("Item for handle %s doesn't exist!", identifierUri)); + return null; + } + + // Check if there is an embargo or get the earliest available date + Date startDate = getEmbargoDate(context, item); + if (Objects.isNull(startDate)) { + startDate = getAvailableDate(context, item); + } + return (Objects.nonNull(startDate)) ? startDate.toString() : null; + } + + /** + * Finds the metadata field corresponding to the provided string. + * + * @param context The DSpace context + * @param mtd The metadata field string + * @return The MetadataField object, or null if not found. 
+ */ + private static MetadataField findMetadataField(Context context, String mtd) { + MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService(); + try { + return metadataFieldService.findByString(context, mtd, '.'); + } catch (SQLException e) { + log.error(String.format("Error finding metadata field %s.", mtd), e); + return null; + } + } + + /** + * Retrieves the embargo start date for the given item bitstreams. + * If an embargo has ended, the end date is returned. + * + * @param context The DSpace context + * @param item The item whose embargo date is to be retrieved. + * @return The start or end date of the embargo, or null if no embargo exists. + */ + private static Date getEmbargoDate(Context context, Item item) { + ResourcePolicyService resPolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + Date startDate = null; + for (Bundle bundle : item.getBundles()) { + for (Bitstream bitstream : bundle.getBitstreams()) { + List resPolList; + try { + resPolList = resPolicyService.find(context, bitstream, Constants.READ); + } catch (SQLException e) { + log.error(String.format("Error during finding resource policies READ for bitstream %s", + bitstream.getID().toString())); + return null; + } + for (ResourcePolicy resPol : resPolList) { + Date date = resPol.getStartDate(); + // If the embargo has already ended, use the date of its end. + if (Objects.nonNull(date) && Objects.nonNull(resPol.getEndDate())) { + date = resPol.getEndDate(); + } + if (Objects.isNull(startDate) || (Objects.nonNull(date) && date.compareTo(startDate) > 0)) { + startDate = date; + } + } + } + } + return startDate; + } + + /** + * Retrieves the available date for the given item by checking the "dc.date.available" metadata. + * + * @param context The DSpace context + * @param item The item whose available date is to be retrieved. + * @return The available date, or null if no available date is found. 
+ */ + private static Date getAvailableDate(Context context, Item item) { + DSpaceFieldResolver dSpaceFieldResolver = new DSpaceFieldResolver(); + List metadataValueList = item.getMetadata(); + String mtdField = "dc.date.available"; + int fieldID; + try { + fieldID = dSpaceFieldResolver.getFieldID(context, mtdField); + } catch (SQLException | InvalidMetadataFieldException e) { + log.error(String.format("Error during finding ID of metadata field %s.", mtdField)); + return null; + } + Date startDate = null; + for (MetadataValue mtd : metadataValueList) { + if (mtd.getMetadataField().getID() == fieldID) { + Date availableDate = parseDate(mtd.getValue()); + if (Objects.isNull(startDate) || (Objects.nonNull(availableDate) + && availableDate.compareTo(startDate) > 0)) { + startDate = availableDate; + } + } + } + return startDate; + } + + /** + * Parses a date string in the format "yyyy-MM-dd" into a Date object. + * + * @param dateString The date string to be parsed. + * @return A Date object representing the parsed date, or null if parsing fails. 
+ */ + private static Date parseDate(String dateString) { + String format = "yyyy-MM-dd"; + SimpleDateFormat dateFormat = new SimpleDateFormat(format); // Example format + dateFormat.setLenient(false); // Set lenient to false to avoid parsing incorrect dates + try { + return dateFormat.parse(dateString); // Attempt to parse the date + } catch (ParseException e) { + log.warn(String.format("Date %s cannot be parsed using the format %s.", dateString, format)); + return null; + } + } + public static boolean hasOwnMetadata(List metadataValues) { if (metadataValues.size() == 1 && metadataValues.get(0).getValue().equalsIgnoreCase("true")) { return true; diff --git a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java index 9d4790b9ff47..c0e540b9576c 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java @@ -26,6 +26,7 @@ import org.dspace.xoai.services.impl.resources.functions.BibtexifyFn; import org.dspace.xoai.services.impl.resources.functions.FormatFn; import org.dspace.xoai.services.impl.resources.functions.GetAuthorFn; +import org.dspace.xoai.services.impl.resources.functions.GetAvailableFn; import org.dspace.xoai.services.impl.resources.functions.GetContactFn; import org.dspace.xoai.services.impl.resources.functions.GetFundingFn; import org.dspace.xoai.services.impl.resources.functions.GetLangForCodeFn; @@ -54,7 +55,7 @@ public class DSpaceResourceResolver implements ResourceResolver { new UriToLicenseFn(), new LogMissingMsgFn(), new UriToRestrictionsFn(), new ShortestIdFn(), new GetContactFn(), new GetAuthorFn(), new GetFundingFn(), new GetLangForCodeFn(), new GetPropertyFn(), new GetSizeFn(), new GetUploadedMetadataFn(), new LogMissingFn(), - new BibtexifyFn(), new FormatFn() + new 
BibtexifyFn(), new FormatFn(), new GetAvailableFn() ); SaxonTransformerFactory saxonTransformerFactory = (SaxonTransformerFactory) transformerFactory; diff --git a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/functions/GetAvailableFn.java b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/functions/GetAvailableFn.java new file mode 100644 index 000000000000..f7843abed51c --- /dev/null +++ b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/functions/GetAvailableFn.java @@ -0,0 +1,31 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xoai.services.impl.resources.functions; + +import org.dspace.utils.SpecialItemService; + +/** + * The GetAvailableFn class extends the StringXSLFunction to provide a custom function + * that retrieves the availability status of an item based on its identifier. + * It uses the SpecialItemService to fetch the available information. + * This function is intended to be used in XSL transformations where the + * "getAvailable" function is called with an item's identifier as a parameter. 
+ * + * @author Michaela Paurikova(michaela.paurikova at dataquest.sk) + */ +public class GetAvailableFn extends StringXSLFunction { + @Override + protected String getFnName() { + return "getAvailable"; + } + + @Override + protected String getStringResult(String param) { + return SpecialItemService.getAvailable(param); + } +} diff --git a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/functions/StringXSLFunction.java b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/functions/StringXSLFunction.java index 163a9eb49ca1..ed260c8b2d4a 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/functions/StringXSLFunction.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/functions/StringXSLFunction.java @@ -74,7 +74,6 @@ final public XdmValue call(XdmValue[] xdmValues) throws SaxonApiException { log.warn("Empty value in call of function of StringXslFunction type"); val = ""; } - return new XdmAtomicValue(checks(getStringResult(val))); } diff --git a/dspace/config/crosswalks/oai/metadataFormats/datacite_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/datacite_openaire.xsl index d6823c6f2c12..64cfda7f20ee 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/datacite_openaire.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/datacite_openaire.xsl @@ -141,19 +141,10 @@ - - - - - - - - - - - - - + + + + From 9911fcc1380dbbbb205696561b3e17f5324afc0d Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Thu, 28 Nov 2024 08:26:41 +0100 Subject: [PATCH 25/45] For now the complex input field is without autocomplete for the size and contact person (#823) --- dspace/config/submission-forms.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index b1f0a6b032f6..19aa68c31d78 100644 --- a/dspace/config/submission-forms.xml +++ 
b/dspace/config/submission-forms.xml @@ -3222,7 +3222,7 @@ - + @@ -3240,7 +3240,7 @@ - From da0590498fe8bda2e469c2afd4f18966d6bb993c Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Thu, 28 Nov 2024 08:27:37 +0100 Subject: [PATCH 26/45] Send the custom type bind `field` to the FE configuration (#822) --- .../java/org/dspace/app/util/DCInput.java | 15 +++++++- .../org/dspace/app/util/DCInputsReader.java | 34 ++++++++++++------- .../converter/SubmissionFormConverter.java | 1 + .../rest/model/SubmissionFormFieldRest.java | 14 ++++++++ 4 files changed, 50 insertions(+), 14 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java index bc15392a3818..2ea8a8866016 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java @@ -169,6 +169,11 @@ public class DCInput { */ private String autocompleteCustom = null; + /** + * the custom field for the type bind + */ + private String typeBindField = null; + /** * the dropdown input type could have defined a default value */ @@ -259,7 +264,7 @@ public DCInput(Map fieldMap, Map> listMap, typeBind = new ArrayList(); String typeBindDef = fieldMap.get("type-bind"); this.insertToTypeBind(typeBindDef); - String typeBindField = fieldMap.get(DCInputsReader.TYPE_BIND_FIELD_ATTRIBUTE); + typeBindField = fieldMap.get(DCInputsReader.TYPE_BIND_FIELD_ATTRIBUTE); this.insertToTypeBind(typeBindField); @@ -741,6 +746,14 @@ public void setAutocompleteCustom(String autocompleteCustom) { this.autocompleteCustom = autocompleteCustom; } + public String getTypeBindField() { + return typeBindField; + } + + public void setTypeBindField(String typeBindField) { + this.typeBindField = typeBindField; + } + /** * Class representing a Map of the ComplexDefinition object * Class is copied from UFAL/CLARIN-DSPACE (https://github.com/ufal/clarin-dspace) 
and modified by diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java index c6e3701ef003..f327a647db3e 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java @@ -501,6 +501,11 @@ private void processField(String formName, Node n, Map field) handleInputTypeTagName(formName, field, nestedNode, nestedValue); } } + } else if (StringUtils.equals(tagName, "type-bind")) { + String customField = getAttribute(nd, TYPE_BIND_FIELD_ATTRIBUTE); + if (customField != null) { + field.put(TYPE_BIND_FIELD_ATTRIBUTE, customField); + } } } } @@ -547,20 +552,23 @@ private void handleInputTypeTagName(String formName, Map field, } else { field.put(PAIR_TYPE_NAME, pairTypeName); } - } else if (value.equals("complex")) { - String definitionName = getAttribute(nd, COMPLEX_DEFINITION_REF); - if (definitionName == null) { - throw new SAXException("Form " + formName - + ", field " + field.get("dc-element") - + "." + field.get("dc-qualifier") - + " has no linked definition"); - } else { - field.put(COMPLEX_DEFINITION_REF, definitionName); + } else { + if (value.equals("complex")) { + String definitionName = getAttribute(nd, COMPLEX_DEFINITION_REF); + if (definitionName == null) { + throw new SAXException("Form " + formName + + ", field " + field.get("dc-element") + + "." 
+ field.get("dc-qualifier") + + " has no linked definition"); + } else { + field.put(COMPLEX_DEFINITION_REF, definitionName); + } } - } else if (value.equals("autocomplete")) { - String definitionName = getAttribute(nd, AUTOCOMPLETE_CUSTOM); - if (definitionName != null) { - field.put(AUTOCOMPLETE_CUSTOM, definitionName); + if (value.equals("autocomplete")) { + String definitionName = getAttribute(nd, AUTOCOMPLETE_CUSTOM); + if (definitionName != null) { + field.put(AUTOCOMPLETE_CUSTOM, definitionName); + } } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java index 8021e4e0d771..5f62f7cf8ae6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java @@ -163,6 +163,7 @@ private SubmissionFormFieldRest getField(DCInput dcinput, String formName) { if (dcinput.isMetadataField()) { inputField.setSelectableMetadata(selectableMetadata); inputField.setTypeBind(dcinput.getTypeBindList()); + inputField.setTypeBindField(dcinput.getTypeBindField()); inputField.setComplexDefinition(dcinput.getComplexDefinitionJSONString()); inputField.setAutocompleteCustom(dcinput.getAutocompleteCustom()); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java index efafa5927e8a..ee49f29da876 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java @@ -98,6 +98,12 @@ public class SubmissionFormFieldRest { */ private String autocompleteCustom; + /** + * The custom field to type bind. 
It is used to check that the custom type bind field is defined when + * it is defined in the configuration property `submit.type-bind.field` + */ + private String typeBindField; + /** * Getter for {@link #selectableMetadata} @@ -313,4 +319,12 @@ public String getAutocompleteCustom() { public void setAutocompleteCustom(String autocompleteCustom) { this.autocompleteCustom = autocompleteCustom; } + + public String getTypeBindField() { + return typeBindField; + } + + public void setTypeBindField(String typeBindField) { + this.typeBindField = typeBindField; + } } From 3aec3688428119804385a0d8dd584f07e151b906 Mon Sep 17 00:00:00 2001 From: Paurikova2 <107862249+Paurikova2@users.noreply.github.com> Date: Thu, 28 Nov 2024 10:28:24 +0100 Subject: [PATCH 27/45] fix date converting to string (#825) * fix date converting to string * made const from format * checkstyle --- .../org/dspace/utils/SpecialItemService.java | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/utils/SpecialItemService.java b/dspace-oai/src/main/java/org/dspace/utils/SpecialItemService.java index c801ccc156a6..6facd49b6c28 100644 --- a/dspace-oai/src/main/java/org/dspace/utils/SpecialItemService.java +++ b/dspace-oai/src/main/java/org/dspace/utils/SpecialItemService.java @@ -61,6 +61,7 @@ */ public class SpecialItemService { private SpecialItemService() {} + private static final String FORMAT = "yyyy-MM-dd'T'HH:mm:ss'Z'"; /** log4j logger */ private static final org.apache.logging.log4j.Logger log = org.apache.logging.log4j .LogManager.getLogger(SpecialItemService.class); @@ -319,7 +320,7 @@ public static String getAvailable(String identifierUri) { if (Objects.isNull(startDate)) { startDate = getAvailableDate(context, item); } - return (Objects.nonNull(startDate)) ? startDate.toString() : null; + return (Objects.nonNull(startDate)) ? 
parseDateToString(startDate) : null; } /** @@ -396,7 +397,7 @@ private static Date getAvailableDate(Context context, Item item) { Date startDate = null; for (MetadataValue mtd : metadataValueList) { if (mtd.getMetadataField().getID() == fieldID) { - Date availableDate = parseDate(mtd.getValue()); + Date availableDate = parseStringToDate(mtd.getValue()); if (Objects.isNull(startDate) || (Objects.nonNull(availableDate) && availableDate.compareTo(startDate) > 0)) { startDate = availableDate; @@ -407,19 +408,28 @@ private static Date getAvailableDate(Context context, Item item) { } /** - * Parses a date string in the format "yyyy-MM-dd" into a Date object. + * Converts date object to string formatted in the pattern. * - * @param dateString The date string to be parsed. + * @param date The date + * @return A string representation of the provided date + */ + private static String parseDateToString(Date date) { + SimpleDateFormat dateFormat = new SimpleDateFormat(FORMAT); + return dateFormat.format(date); + } + + /** + * Parses a date string in the format into a Date object. + * + * @param dateString date string to be parsed. * @return A Date object representing the parsed date, or null if parsing fails. 
*/ - private static Date parseDate(String dateString) { - String format = "yyyy-MM-dd"; - SimpleDateFormat dateFormat = new SimpleDateFormat(format); // Example format - dateFormat.setLenient(false); // Set lenient to false to avoid parsing incorrect dates + private static Date parseStringToDate(String dateString) { + SimpleDateFormat dateFormat = new SimpleDateFormat(FORMAT); try { - return dateFormat.parse(dateString); // Attempt to parse the date + return dateFormat.parse(dateString); } catch (ParseException e) { - log.warn(String.format("Date %s cannot be parsed using the format %s.", dateString, format)); + log.warn(String.format("Date %s cannot be parsed using the format %s.", dateString, FORMAT)); return null; } } From 005c939d87c1cbcd32a91c45d0c56fca2432fb2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Ko=C5=A1arko?= Date: Fri, 29 Nov 2024 09:12:03 +0100 Subject: [PATCH 28/45] cherry-pick clarin v7 into dtq dev (#820) * cherry-picked DataCite related changes from customer/uk * Add a script that adds a file from url to an item intended for large file workflows * Add ways to influence the bitstream name * add more options to specify an item * Expose resourceId (#1134) A BE part of #1127 - this exposes the resourceId so it can be used in the handle mgmt table * fixes ufal/clarin-dspace#1135 - findEpersonByNetId should stop searching when it finds an eperson - moved the `return eperson` inside the for cycle (after eperson non null check). 
- removed the eperson param (both callers were passing in `null`) --- .../org/dspace/administer/FileDownloader.java | 229 ++++++++++++++++++ .../FileDownloaderConfiguration.java | 73 ++++++ .../clarin/ClarinShibAuthentication.java | 11 +- .../identifier/doi/DataCiteConnector.java | 3 +- .../config/spring/api/scripts.xml | 5 + .../dspace/administer/FileDownloaderIT.java | 110 +++++++++ .../app/rest/converter/HandleConverter.java | 7 + .../org/dspace/app/rest/model/HandleRest.java | 13 + .../clarin/ClarinShibbolethLoginFilter.java | 2 +- dspace/config/clarin-dspace.cfg | 6 +- dspace/config/crosswalks/DIM2DataCite.xsl | 2 +- dspace/config/spring/rest/scripts.xml | 5 + 12 files changed, 455 insertions(+), 11 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/administer/FileDownloader.java create mode 100644 dspace-api/src/main/java/org/dspace/administer/FileDownloaderConfiguration.java create mode 100644 dspace-api/src/test/java/org/dspace/administer/FileDownloaderIT.java diff --git a/dspace-api/src/main/java/org/dspace/administer/FileDownloader.java b/dspace-api/src/main/java/org/dspace/administer/FileDownloader.java new file mode 100644 index 000000000000..fb592627adef --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/administer/FileDownloader.java @@ -0,0 +1,229 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; +import java.util.stream.Stream; + +import org.apache.commons.cli.ParseException; +import org.dspace.authorize.AuthorizeException; 
+import org.dspace.content.Bitstream; +import org.dspace.content.BitstreamFormat; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamFormatService; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.identifier.IdentifierNotFoundException; +import org.dspace.identifier.IdentifierNotResolvableException; +import org.dspace.identifier.factory.IdentifierServiceFactory; +import org.dspace.identifier.service.IdentifierService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.utils.DSpace; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +public class FileDownloader extends DSpaceRunnable { + + private static final Logger log = LoggerFactory.getLogger(FileDownloader.class); + private boolean help = false; + private UUID itemUUID; + private int workspaceID; + private String pid; + private URI uri; + private String epersonMail; + private String bitstreamName; + private EPersonService epersonService; + private ItemService itemService; + private WorkspaceItemService workspaceItemService; + private IdentifierService identifierService; + private BitstreamService bitstreamService; + private BitstreamFormatService bitstreamFormatService; + private final HttpClient httpClient = HttpClient.newBuilder() + .followRedirects(HttpClient.Redirect.NORMAL) + .build(); + + /** + * This method will return the Configuration that the implementing DSpaceRunnable uses + * + * @return The {@link ScriptConfiguration} that this implementing DspaceRunnable 
uses + */ + @Override + public FileDownloaderConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager().getServiceByName("file-downloader", + FileDownloaderConfiguration.class); + } + + /** + * This method has to be included in every script and handles the setup of the script by parsing the CommandLine + * and setting the variables + * + * @throws ParseException If something goes wrong + */ + @Override + public void setup() throws ParseException { + log.debug("Setting up {}", FileDownloader.class.getName()); + if (commandLine.hasOption("h")) { + help = true; + return; + } + + if (!commandLine.hasOption("u")) { + throw new ParseException("No URL option has been provided"); + } + + if (!commandLine.hasOption("i") && !commandLine.hasOption("w") && !commandLine.hasOption("p")) { + throw new ParseException("No item id option has been provided"); + } + + if (getEpersonIdentifier() == null && !commandLine.hasOption("e")) { + throw new ParseException("No eperson option has been provided"); + } + + + this.epersonService = EPersonServiceFactory.getInstance().getEPersonService(); + this.itemService = ContentServiceFactory.getInstance().getItemService(); + this.workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + this.bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + this.bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService(); + this.identifierService = IdentifierServiceFactory.getInstance().getIdentifierService(); + + try { + uri = new URI(commandLine.getOptionValue("u")); + } catch (URISyntaxException e) { + throw new ParseException("The provided URL is not a valid URL"); + } + + if (commandLine.hasOption("i")) { + itemUUID = UUID.fromString(commandLine.getOptionValue("i")); + } else if (commandLine.hasOption("w")) { + workspaceID = Integer.parseInt(commandLine.getOptionValue("w")); + } else if (commandLine.hasOption("p")) { + pid = 
commandLine.getOptionValue("p"); + } + + epersonMail = commandLine.getOptionValue("e"); + + if (commandLine.hasOption("n")) { + bitstreamName = commandLine.getOptionValue("n"); + } + } + + /** + * This method has to be included in every script and this will be the main execution block for the script that'll + * contain all the logic needed + * + * @throws Exception If something goes wrong + */ + @Override + public void internalRun() throws Exception { + log.debug("Running {}", FileDownloader.class.getName()); + if (help) { + printHelp(); + return; + } + + Context context = new Context(); + context.setCurrentUser(getEperson(context)); + + //find the item by the given id + Item item = findItem(context); + if (item == null) { + throw new IllegalArgumentException("No item found for the given ID"); + } + + HttpRequest request = HttpRequest.newBuilder() + .uri(uri) + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream()); + + if (response.statusCode() >= 400) { + throw new IllegalArgumentException("The provided URL returned a status code of " + response.statusCode()); + } + + //use the provided value, the content-disposition header, the last part of the uri + if (bitstreamName == null) { + bitstreamName = response.headers().firstValue("Content-Disposition") + .filter(value -> value.contains("filename=")).flatMap(value -> Stream.of(value.split(";")) + .filter(v -> v.contains("filename=")) + .findFirst() + .map(fvalue -> fvalue.replaceFirst("filename=", "").replaceAll("\"", ""))) + .orElse(uri.getPath().substring(uri.getPath().lastIndexOf('/') + 1)); + } + + try (InputStream is = response.body()) { + saveFileToItem(context, item, is, bitstreamName); + } + + context.commit(); + } + + private Item findItem(Context context) throws SQLException { + if (itemUUID != null) { + return itemService.find(context, itemUUID); + } else if (workspaceID != 0) { + return workspaceItemService.find(context, workspaceID).getItem(); + } else { 
+ try { + DSpaceObject dso = identifierService.resolve(context, pid); + if (dso instanceof Item) { + return (Item) dso; + } else { + throw new IllegalArgumentException("The provided identifier does not resolve to an item"); + } + } catch (IdentifierNotFoundException | IdentifierNotResolvableException e) { + throw new IllegalArgumentException(e); + } + } + } + + private void saveFileToItem(Context context, Item item, InputStream is, String name) + throws SQLException, AuthorizeException, IOException { + log.debug("Saving file to item {}", item.getID()); + List originals = item.getBundles("ORIGINAL"); + Bitstream b; + if (originals.isEmpty()) { + b = itemService.createSingleBitstream(context, is, item); + } else { + Bundle bundle = originals.get(0); + b = bitstreamService.create(context, bundle, is); + } + b.setName(context, name); + //now guess format of the bitstream + BitstreamFormat bf = bitstreamFormatService.guessFormat(context, b); + b.setFormat(context, bf); + } + + private EPerson getEperson(Context context) throws SQLException { + if (getEpersonIdentifier() != null) { + return epersonService.find(context, getEpersonIdentifier()); + } else { + return epersonService.findByEmail(context, epersonMail); + } + } +} + diff --git a/dspace-api/src/main/java/org/dspace/administer/FileDownloaderConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/FileDownloaderConfiguration.java new file mode 100644 index 000000000000..848b2d99f7c0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/administer/FileDownloaderConfiguration.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +import org.apache.commons.cli.OptionGroup; +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +public 
class FileDownloaderConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + /** + * Generic getter for the dspaceRunnableClass + * + * @return the dspaceRunnableClass value of this ScriptConfiguration + */ + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + /** + * The getter for the options of the Script + * + * @return the options value of this ScriptConfiguration + */ + @Override + public Options getOptions() { + if (options == null) { + + Options options = new Options(); + OptionGroup ids = new OptionGroup(); + + options.addOption("h", "help", false, "help"); + + options.addOption("u", "url", true, "source url"); + options.getOption("u").setRequired(true); + + options.addOption("i", "uuid", true, "item uuid"); + options.addOption("w", "wsid", true, "workspace id"); + options.addOption("p", "pid", true, "item pid (e.g. 
handle or doi)"); + ids.addOption(options.getOption("i")); + ids.addOption(options.getOption("w")); + ids.addOption(options.getOption("p")); + ids.setRequired(true); + + options.addOption("e", "eperson", true, "eperson email"); + options.getOption("e").setRequired(false); + + options.addOption("n", "name", true, "name of the file/bitstream"); + options.getOption("n").setRequired(false); + + super.options = options; + } + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/clarin/ClarinShibAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/clarin/ClarinShibAuthentication.java index 53adf9d79b8c..ba5d8cd65bfa 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/clarin/ClarinShibAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/clarin/ClarinShibAuthentication.java @@ -565,7 +565,7 @@ protected EPerson findEPerson(Context context, HttpServletRequest request, Strin // 1) First, look for a netid header. if (netidHeaders != null) { - eperson = findEpersonByNetId(netidHeaders, shibheaders, eperson, ePersonService, context, true); + eperson = findEpersonByNetId(netidHeaders, shibheaders, ePersonService, context, true); if (eperson != null) { foundNetID = true; } @@ -1318,7 +1318,7 @@ public String getEmailAcceptedOrNull(String email) { /** * Find an EPerson by a NetID header. The method will go through all the netid headers and try to find a user. */ - public static EPerson findEpersonByNetId(String[] netidHeaders, ShibHeaders shibheaders, EPerson eperson, + public static EPerson findEpersonByNetId(String[] netidHeaders, ShibHeaders shibheaders, EPersonService ePersonService, Context context, boolean logAllowed) throws SQLException { // Go through all the netid headers and try to find a user. It could be e.g., `eppn`, `persistent-id`,.. 
@@ -1329,19 +1329,20 @@ public static EPerson findEpersonByNetId(String[] netidHeaders, ShibHeaders shib continue; } - eperson = ePersonService.findByNetid(context, netid); + EPerson eperson = ePersonService.findByNetid(context, netid); if (eperson == null && logAllowed) { log.info( "Unable to identify EPerson based upon Shibboleth netid header: '" + netidHeader + "'='" + netid + "'."); - } else { + } else if (eperson != null) { log.debug( "Identified EPerson based upon Shibboleth netid header: '" + netidHeader + "'='" + netid + "'" + "."); + return eperson; } } - return eperson; + return null; } /** diff --git a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java index 57136d6143bb..62e8e46a49dd 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java +++ b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java @@ -438,10 +438,11 @@ public void reserveDOI(Context context, DSpaceObject dso, String doi) return; } // 400 -> invalid XML + case (422): case (400): { log.warn("DataCite was unable to understand the XML we send."); log.warn("DataCite Metadata API returned a http status code " - + "400: " + resp.getContent()); + + resp.getStatusCode() + ": " + resp.getContent()); Format format = Format.getCompactFormat(); format.setEncoding("UTF-8"); XMLOutputter xout = new XMLOutputter(format); diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml index e388065b68fd..738e11f7b432 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml @@ -91,4 +91,9 @@ + + + + + diff --git a/dspace-api/src/test/java/org/dspace/administer/FileDownloaderIT.java b/dspace-api/src/test/java/org/dspace/administer/FileDownloaderIT.java new file mode 100644 
index 000000000000..ee75fddc57e4 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/administer/FileDownloaderIT.java @@ -0,0 +1,110 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockserver.model.HttpRequest.request; +import static org.mockserver.model.HttpResponse.response; + +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.mockserver.junit.MockServerRule; + + +public class FileDownloaderIT extends AbstractIntegrationTestWithDatabase { + + @Rule + public MockServerRule mockServerRule = new MockServerRule(this); + + private Item item; + + //Prepare a community and a collection before the test + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.setCurrentUser(admin); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + item = ItemBuilder.createItem(context, collection).withTitle("FileDownloaderIT Item").build(); + + mockServerRule.getClient().when(request() + .withMethod("GET") + .withPath("/test400") + ).respond( + response() + .withStatusCode(400) + .withBody("test") + ); + + 
mockServerRule.getClient().when(request() + .withMethod("GET") + .withPath("/test") + ).respond( + response() + .withStatusCode(200) + .withHeader("Content-Disposition", "attachment; filename=\"test.txt\"") + .withBody("test") + ); + } + + //Test that when an error occurs no bitstream is actually added to the item + @Test() + public void testDownloadFileError() throws Exception { + + + BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + int oldBitCount = bitstreamService.countTotal(context); + + int port = mockServerRule.getPort(); + String[] args = new String[]{"file-downloader", "-i", item.getID().toString(), + "-u", String.format("http://localhost:%s/test400", port), "-e", "admin@email.com"}; + try { + runDSpaceScript(args); + } catch (IllegalArgumentException e) { + assertEquals(0, item.getBundles().size()); + int newBitCount = bitstreamService.countTotal(context); + assertEquals(oldBitCount, newBitCount); + return; + } + assertEquals("Not expecting to get here", 0, 1); + } + + + //Test that FileDownlaoder actually adds the bitstream to the item + @Test + public void testDownloadFile() throws Exception { + + int port = mockServerRule.getPort(); + String[] args = new String[] {"file-downloader", "-i", item.getID().toString(), + "-u", String.format("http://localhost:%s/test", port), "-e", "admin@email.com"}; + runDSpaceScript(args); + + + assertEquals(1, item.getBundles().size()); + List bs = item.getBundles().get(0).getBitstreams(); + assertEquals(1, bs.size()); + assertNotNull("Expecting name to be defined", bs.get(0).getName()); + + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HandleConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HandleConverter.java index dbc40a21cb60..ceb6409429a1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HandleConverter.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HandleConverter.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest.converter; +import java.util.Objects; + import org.dspace.app.rest.model.HandleRest; import org.dspace.app.rest.projection.Projection; import org.dspace.handle.Handle; @@ -35,6 +37,11 @@ public HandleRest convert(Handle modelObject, Projection projection) { handleRest.setHandle(modelObject.getHandle()); handleRest.setResourceTypeID(modelObject.getResourceTypeId()); handleRest.setUrl(modelObject.getUrl()); + if (Objects.nonNull(modelObject.getDSpaceObject())) { + handleRest.setResourceId(modelObject.getDSpaceObject().getID()); + } else { + handleRest.setResourceId(null); + } return handleRest; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/HandleRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/HandleRest.java index 6bb989eff09d..c40c9faa7017 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/HandleRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/HandleRest.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest.model; +import java.util.UUID; + import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import org.dspace.app.rest.RestResourceController; @@ -29,6 +31,8 @@ public class HandleRest extends BaseObjectRest { private String url; + private UUID resourceId; + public String getHandle() { return handle; } @@ -41,6 +45,10 @@ public String getUrl() { return url; } + public UUID getResourceId() { + return resourceId; + } + public void setHandle(String handle) { this.handle = handle; } @@ -53,6 +61,11 @@ public void setUrl(String url) { this.url = url; } + public void setResourceId(UUID resourceId) { + this.resourceId = resourceId; + + } + @Override public String getCategory() { return CATEGORY; diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java index 774cdcd80850..053ddd6915ee 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/clarin/ClarinShibbolethLoginFilter.java @@ -167,7 +167,7 @@ public Authentication attemptAuthentication(HttpServletRequest req, EPerson ePerson = null; try { - ePerson = ClarinShibAuthentication.findEpersonByNetId(shib_headers.getNetIdHeaders(), shib_headers, ePerson, + ePerson = ClarinShibAuthentication.findEpersonByNetId(shib_headers.getNetIdHeaders(), shib_headers, ePersonService, context, false); } catch (SQLException e) { // It is logged in the ClarinShibAuthentication class. diff --git a/dspace/config/clarin-dspace.cfg b/dspace/config/clarin-dspace.cfg index ebb3fa782c51..14b972d7d295 100644 --- a/dspace/config/clarin-dspace.cfg +++ b/dspace/config/clarin-dspace.cfg @@ -209,13 +209,13 @@ identifier.doi.namespaceseparator = dspace/ ## crosswalk.dissemination.DataCite.stylesheet = crosswalks/DIM2DataCite.xsl crosswalk.dissemination.DataCite.schemaLocation = \ - http://datacite.org/schema/kernel-3 \ - http://schema.datacite.org/meta/kernel-3/metadata.xsd + http://datacite.org/schema/kernel-4 \ + https://schema.datacite.org/meta/kernel-4.5/metadata.xsd crosswalk.dissemination.DataCite.preferList = false crosswalk.dissemination.DataCite.publisher = My University #crosswalk.dissemination.DataCite.dataManager = # defaults to publisher #crosswalk.dissemination.DataCite.hostingInstitution = # defaults to publisher -crosswalk.dissemination.DataCite.namespace = http://datacite.org/schema/kernel-3 +crosswalk.dissemination.DataCite.namespace = http://datacite.org/schema/kernel-4 # consumer to update metadata of DOIs event.consumer.doi.class = 
org.dspace.identifier.doi.DOIConsumer diff --git a/dspace/config/crosswalks/DIM2DataCite.xsl b/dspace/config/crosswalks/DIM2DataCite.xsl index 51fef2275c40..fe6622a1f5fb 100644 --- a/dspace/config/crosswalks/DIM2DataCite.xsl +++ b/dspace/config/crosswalks/DIM2DataCite.xsl @@ -48,7 +48,7 @@ --> + xsi:schemaLocation="http://datacite.org/schema/kernel-4 https://schema.datacite.org/meta/kernel-4.5/metadata.xsd"> From 3486d2da1c304f1d1c374cd100fae42d21f01698 Mon Sep 17 00:00:00 2001 From: jurinecko <95219754+jr-rk@users.noreply.github.com> Date: Fri, 29 Nov 2024 11:08:36 +0100 Subject: [PATCH 30/45] UFAL/Matomo statistics with dimension (#813) * Updated the version of matomo dependency and tried to change request from Custom Variables to Dimension * Added a custom dimension with item's handle URL * Send custom dimension also in oai tracker * Use only IPv4 address, the Matomo tracker has a problem with IPv6 * Do not change custom dimension when the Item is null * First custom dimension should have ID '1'. 
* Use a valid URL for Matomo tracker in the IT * Configure handle custom dimension ID in the clarin-dspace.cfg * Refactored ipv4 method to be more readable - return null --------- Co-authored-by: Juraj Roka Co-authored-by: milanmajchrak Co-authored-by: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> --- dspace-api/pom.xml | 4 +- .../clarin/ClarinMatomoBitstreamTracker.java | 7 ++- .../clarin/ClarinMatomoTracker.java | 52 +++++++++++-------- .../test/data/dspaceFolder/config/local.cfg | 4 ++ dspace/config/clarin-dspace.cfg | 1 + 5 files changed, 44 insertions(+), 24 deletions(-) diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index c7b93f6ac377..6ac36d129ade 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -339,8 +339,8 @@ org.piwik.java.tracking - matomo-java-tracker - 2.0 + matomo-java-tracker-java11 + 3.4.0 org.apache.logging.log4j diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/clarin/ClarinMatomoBitstreamTracker.java b/dspace-api/src/main/java/org/dspace/app/statistics/clarin/ClarinMatomoBitstreamTracker.java index 838a2650586a..5b90ab7fc740 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/clarin/ClarinMatomoBitstreamTracker.java +++ b/dspace-api/src/main/java/org/dspace/app/statistics/clarin/ClarinMatomoBitstreamTracker.java @@ -9,6 +9,7 @@ import java.sql.SQLException; import java.text.MessageFormat; +import java.util.LinkedHashMap; import java.util.List; import java.util.Objects; import javax.servlet.http.HttpServletRequest; @@ -69,7 +70,6 @@ public ClarinMatomoBitstreamTracker() { @Override protected void preTrack(Context context, MatomoRequest matomoRequest, Item item, HttpServletRequest request) { super.preTrack(context, matomoRequest, item, request); - matomoRequest.setSiteId(siteId); log.debug("Logging to site " + matomoRequest.getSiteId()); String itemIdentifier = getItemIdentifier(item); @@ -82,6 +82,11 @@ protected void preTrack(Context context, MatomoRequest matomoRequest, Item item, } 
try { matomoRequest.setPageCustomVariable(new CustomVariable("source", "bitstream"), 1); + // Add the Item handle into the request as a custom dimension + LinkedHashMap handleDimension = new LinkedHashMap<>(); + handleDimension.put(configurationService.getLongProperty("matomo.custom.dimension.handle.id", + 1L), item.getHandle()); + matomoRequest.setDimensions(handleDimension); } catch (MatomoException e) { log.error(e); } diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/clarin/ClarinMatomoTracker.java b/dspace-api/src/main/java/org/dspace/app/statistics/clarin/ClarinMatomoTracker.java index 360b4efe8e93..34615bc2ed24 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/clarin/ClarinMatomoTracker.java +++ b/dspace-api/src/main/java/org/dspace/app/statistics/clarin/ClarinMatomoTracker.java @@ -7,14 +7,14 @@ */ package org.dspace.app.statistics.clarin; +import java.net.InetAddress; +import java.net.UnknownHostException; import java.util.Calendar; import java.util.Objects; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; +import java.util.concurrent.CompletableFuture; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang3.StringUtils; -import org.apache.http.HttpResponse; import org.apache.logging.log4j.Logger; import org.dspace.content.Item; import org.dspace.content.factory.ClarinServiceFactory; @@ -23,6 +23,7 @@ import org.dspace.services.factory.DSpaceServicesFactory; import org.matomo.java.tracking.MatomoException; import org.matomo.java.tracking.MatomoRequest; +import org.matomo.java.tracking.parameters.AcceptLanguage; /** * The statistics Tracker for Matomo. 
This class prepare and send the track GET request to the `/matomo.php` @@ -99,13 +100,13 @@ protected MatomoRequest createMatomoRequest(HttpServletRequest request, String p */ protected void preTrack(Context context, MatomoRequest matomoRequest, Item item, HttpServletRequest request) { if (StringUtils.isNotBlank(request.getHeader("referer"))) { - matomoRequest.setHeaderUserAgent(request.getHeader("referer")); + matomoRequest.setReferrerUrl(request.getHeader("referer")); } if (StringUtils.isNotBlank(request.getHeader("user-agent"))) { matomoRequest.setHeaderUserAgent(request.getHeader("user-agent")); } if (StringUtils.isNotBlank(request.getHeader("accept-language"))) { - matomoRequest.setHeaderUserAgent(request.getHeader("accept-language")); + matomoRequest.setHeaderAcceptLanguage(AcceptLanguage.fromHeader(request.getHeader("accept-language"))); } // Creating a calendar using getInstance method @@ -134,18 +135,13 @@ protected void preTrack(Context context, MatomoRequest matomoRequest, Item item, * @param matomoRequest prepared MatomoRequest for sending */ public void sendTrackingRequest(MatomoRequest matomoRequest) { - try { - Future response = tracker.sendRequestAsync(matomoRequest); - // usually not needed: - HttpResponse httpResponse = response.get(); - int statusCode = httpResponse.getStatusLine().getStatusCode(); - if (statusCode > 399) { - // problem - log.error("Matomo tracker error the response has status code: " + statusCode); + CompletableFuture completableFuture = tracker.sendRequestAsync(matomoRequest); + + completableFuture.whenComplete((result, exception) -> { + if (exception != null) { + log.error("Matomo tracker error - the response exception message: {}", exception.getMessage()); } - } catch (ExecutionException | InterruptedException e) { - e.printStackTrace(); - } + }); } protected String getFullURL(HttpServletRequest request) { @@ -164,21 +160,35 @@ protected String getFullURL(HttpServletRequest request) { } /** - * Get IpAddress of the current 
user which throws this statistic event + * Get IpAddress of the current user which throws this statistic event. Return only the first valid IPv4 address + * because the Matomo tracker has a problem with IPv6 addresses. * * @param request current request - * @return + * @return only the first valid IPv4 address */ protected String getIpAddress(HttpServletRequest request) { - String ip = ""; String header = request.getHeader("X-Forwarded-For"); if (header == null) { header = request.getRemoteAddr(); } if (header != null) { String[] ips = header.split(", "); - ip = ips.length > 0 ? ips[0] : ""; + for (String candidateIp : ips) { + // Validate if it's an IPv4 address + if (isIPv4Address(candidateIp)) { + return candidateIp; + } + } + } + return null; + } + + private boolean isIPv4Address(String ip) { + try { + InetAddress inetAddress = InetAddress.getByName(ip); + return inetAddress.getHostAddress().equals(ip) && inetAddress instanceof java.net.Inet4Address; + } catch (UnknownHostException e) { + return false; // Not a valid IP address } - return ip; } } diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 6b31cfc98123..14ff9e3a72a3 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -312,6 +312,10 @@ webui.supported.locales = en # When title is something like "Type-bind test" the type-bind field will popped up submit.type-bind.field = dc.type,dc.identifier.citation=>dc.title +# The configuration for the Matomo tracker must have a valid URL, as it will throw an exception if it does not. 
+matomo.tracker.host.url = http://localhost:8135/matomo.php + autocomplete.custom.separator.solr-subject_ac = \\|\\|\\| autocomplete.custom.separator.solr-title_ac = \\|\\|\\| autocomplete.custom.allowed = solr-author_ac,solr-publisher_ac,solr-dataProvider_ac,solr-dctype_ac,solr-subject_ac,solr-handle_title_ac,json_static-iso_langs.json,solr-title_ac + diff --git a/dspace/config/clarin-dspace.cfg b/dspace/config/clarin-dspace.cfg index 14b972d7d295..b6dbcef3ca11 100644 --- a/dspace/config/clarin-dspace.cfg +++ b/dspace/config/clarin-dspace.cfg @@ -155,6 +155,7 @@ matomo.site.id = 1 matomo.tracker.bitstream.site_id = 1 matomo.tracker.oai.site_id = 1 matomo.tracker.host.url = http://url:port/matomo.php +matomo.custom.dimension.handle.id = 1 statistics.cache-server.uri = http://cache-server.none #### Statistic usage reports #### From 3ec13b2250bfc99dbe0831a369d2db12223e1779 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Thu, 5 Dec 2024 17:20:38 +0100 Subject: [PATCH 31/45] The `dspace.name.short` is not working properly in the email, use `dspace.shortname` instead (#828) --- .../org/dspace/app/rest/ClarinAutoRegistrationController.java | 2 +- .../app/rest/repository/ClarinUserMetadataRestController.java | 2 +- dspace/config/clarin-dspace.cfg | 2 +- dspace/config/emails/clarin_download_link_admin | 4 ++-- dspace/config/emails/share_submission | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java index eef3b8495699..3ef39cc9794c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ClarinAutoRegistrationController.java @@ -89,7 +89,7 @@ public ResponseEntity sendEmail(HttpServletRequest request, 
HttpServletResponse String helpDeskEmail = configurationService.getProperty("lr.help.mail", ""); String helpDeskPhoneNum = configurationService.getProperty("lr.help.phone", ""); String dspaceName = configurationService.getProperty("dspace.name", ""); - String dspaceNameShort = configurationService.getProperty("dspace.name.short", ""); + String dspaceNameShort = configurationService.getProperty("dspace.shortname", ""); if (StringUtils.isEmpty(uiUrl)) { log.error("Cannot load the `dspace.ui.url` property from the cfg."); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestController.java index 84a6c9b00890..05f1c34902dd 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ClarinUserMetadataRestController.java @@ -283,7 +283,7 @@ private void sendEmailWithDownloadLink(Context context, DSpaceObject dso, String helpDeskEmail = configurationService.getProperty("lr.help.mail", ""); String helpDeskPhoneNum = configurationService.getProperty("lr.help.phone", ""); String dspaceName = configurationService.getProperty("dspace.name", ""); - String dspaceNameShort = configurationService.getProperty("dspace.name.short", ""); + String dspaceNameShort = configurationService.getProperty("dspace.shortname", ""); if (StringUtils.isEmpty(uiUrl)) { log.error("Cannot load the `dspace.ui.url` property from the cfg."); diff --git a/dspace/config/clarin-dspace.cfg b/dspace/config/clarin-dspace.cfg index b6dbcef3ca11..f27d6830c213 100644 --- a/dspace/config/clarin-dspace.cfg +++ b/dspace/config/clarin-dspace.cfg @@ -5,7 +5,7 @@ #------------------------------------------------------------------# #---------------------------DSpace---------------------------------# 
#------------------------------------------------------------------# -dspace.name.short = DSpace +dspace.shortname = DSpace dspace.name = CLARIN DSpace #------------------------------------------------------------------# diff --git a/dspace/config/emails/clarin_download_link_admin b/dspace/config/emails/clarin_download_link_admin index 3119ee134fa1..3288ea422d9f 100644 --- a/dspace/config/emails/clarin_download_link_admin +++ b/dspace/config/emails/clarin_download_link_admin @@ -9,7 +9,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = "${config.get('dspace.name.short')}: New File Download Request (CC)") +#set($subject = "${config.get('dspace.shortname')}: New File Download Request (CC)") This is an information for administrators and other configured people about a new download request. @@ -26,7 +26,7 @@ The file is distributed under specific license: Extra information filled by the user: ${params[5]} -${config.get('dspace.name.short')} Team +${config.get('dspace.shortname')} Team _____________________________________ ${config.get('dspace.name')}, diff --git a/dspace/config/emails/share_submission b/dspace/config/emails/share_submission index 884d183726d1..6a930dee95f3 100644 --- a/dspace/config/emails/share_submission +++ b/dspace/config/emails/share_submission @@ -12,7 +12,7 @@ If you have trouble please contact ${config.get('lr.help.mail')} or call us at ${config.get('lr.help.phone')} -${config.get('dspace.name.short')} Team +${config.get('dspace.shortname')} Team _____________________________________ ${config.get('dspace.name')}, From 56b4862c2240fe0fdbf601d50ff9f22da8e17571 Mon Sep 17 00:00:00 2001 From: milanmajchrak Date: Fri, 6 Dec 2024 15:37:21 +0100 Subject: [PATCH 32/45] Assigning to JAVA_OPTS_HANDLE must be without $ --- dspace/bin/start-handle-server | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/bin/start-handle-server b/dspace/bin/start-handle-server index 
06c4fb5d90c9..d0c929b251ec 100755 --- a/dspace/bin/start-handle-server +++ b/dspace/bin/start-handle-server @@ -25,7 +25,7 @@ LOGDIR=$DSPACEDIR/log #Allow user to specify java options through JAVA_OPTS variable if [ "$JAVA_OPTS_HANDLE" = "" ]; then #Default Java to use 256MB of memory - $JAVA_OPTS_HANDLE=-Xmx256m + JAVA_OPTS_HANDLE=-Xmx256m fi # Remove lock file, in case the old Handle server did not shut down properly From dc57ffded8a185b5d1ff190aa1c8ed3512056168 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Thu, 16 Jan 2025 17:44:00 +0100 Subject: [PATCH 33/45] Transaction bug - close context in finally block (#845) (#852) * Transaction bug - see differences (#845) * Logging hibernate statistics doesn't work as expected. * Undo connection pool timeout constants * Removed a huge amount of log.info * Added abort to try catch blocks to check if it will solve IT * Call aborting the context in the finally block of the StatelessAuthenticationFilter --------- Co-authored-by: jm * Empty commit to run actions * Added a warning message when the transaction is alive during initializing a new Context. 
* Updated the `actions/upload-artifact` to v4 because of `https://github.blog/changelog/2024-04-16-deprecation-notice-v3-of-the-artifact-actions/` * Show warning when the transaction is alive when the dbConnection is created --------- Co-authored-by: jm --- .github/workflows/build.yml | 4 +-- .../main/java/org/dspace/core/Context.java | 35 +++++++++++++++++++ .../dspace/core/HibernateDBConnection.java | 8 ----- .../src/main/java/org/dspace/core/Utils.java | 25 +++++++++++++ .../StatelessAuthenticationFilter.java | 18 +++++++++- dspace/config/log4j2.xml | 6 ++-- 6 files changed, 83 insertions(+), 13 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 59cf1935fb64..c58d6dc73c48 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -79,14 +79,14 @@ jobs: # (This artifact is downloadable at the bottom of any job's summary page) - name: Upload Results of ${{ matrix.type }} to Artifact if: ${{ failure() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.type }} results path: ${{ matrix.resultsdir }} # Upload code coverage report to artifact, so that it can be shared with the 'codecov' job (see below) - name: Upload code coverage report to Artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ matrix.type }} coverage report path: 'dspace/target/site/jacoco-aggregate/jacoco.xml' diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index 8eed24348c39..c73dd9a688e1 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -169,6 +169,10 @@ public Context(Mode mode) { * Initializes a new context object. 
*/ protected void init() { + if (log.isDebugEnabled()) { + log.debug("Initializing new context with hash: {}.", getHash()); + } + updateDatabase(); if (eventService == null) { @@ -181,6 +185,11 @@ protected void init() { if (dbConnection == null) { log.fatal("Cannot obtain the bean which provides a database connection. " + "Check previous entries in the dspace.log to find why the db failed to initialize."); + } else { + if (isTransactionAlive()) { + log.warn("Initializing a context while an active transaction exists. Context with hash: {}.", + getHash()); + } } } @@ -392,6 +401,10 @@ public String getExtraLogInfo() { * or closing the connection */ public void complete() throws SQLException { + if (log.isDebugEnabled()) { + log.debug("Completing context with hash: {}.", getHash()); + } + // If Context is no longer open/valid, just note that it has already been closed if (!isValid()) { log.info("complete() was called on a closed Context object. No changes to commit."); @@ -424,6 +437,10 @@ public void complete() throws SQLException { * @throws SQLException When committing the transaction in the database fails. */ public void commit() throws SQLException { + if (log.isDebugEnabled()) { + log.debug("Committing context with hash: {}.", getHash()); + } + // If Context is no longer open/valid, just note that it has already been closed if (!isValid()) { log.info("commit() was called on a closed Context object. No changes to commit."); @@ -556,6 +573,10 @@ public Event pollEvent() { * @throws SQLException When rollbacking the transaction in the database fails. */ public void rollback() throws SQLException { + if (log.isDebugEnabled()) { + log.debug("Rolling back context with hash: {}.", getHash()); + } + // If Context is no longer open/valid, just note that it has already been closed if (!isValid()) { log.info("rollback() was called on a closed Context object. No changes to abort."); @@ -584,6 +605,10 @@ public void rollback() throws SQLException { * is a no-op. 
*/ public void abort() { + if (log.isDebugEnabled()) { + log.debug("Aborting context with hash: {}.", getHash()); + } + // If Context is no longer open/valid, just note that it has already been closed if (!isValid()) { log.info("abort() was called on a closed Context object. No changes to abort."); @@ -617,6 +642,9 @@ public void abort() { */ @Override public void close() { + if (log.isDebugEnabled()) { + log.debug("Closing context with hash: {}.", getHash()); + } if (isValid()) { abort(); } @@ -988,4 +1016,11 @@ public String getHibernateStatistics() { } return "Hibernate statistics are not available for this database connection"; } + + /** + * Get the hash of the context object. This hash is based on the memory address of the object. + */ + public String getHash() { + return String.valueOf(System.identityHashCode(this)); + } } diff --git a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java index f8c620380d5f..d154287e4a90 100644 --- a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java +++ b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java @@ -35,7 +35,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.orm.hibernate5.SessionFactoryUtils; -import org.springframework.scheduling.annotation.Scheduled; /** * Hibernate implementation of the DBConnection. @@ -108,13 +107,6 @@ protected Transaction getTransaction() { return sessionFactory.getCurrentSession().getTransaction(); } - // This method will run every 10 seconds - @Scheduled(fixedRate = 10000) // Fixed rate in milliseconds - public void logConnectionMetrics() { - logHibernateStatistics(); - logDatabaseMetaData(); - } - /** * Check if Hibernate Session is still "alive" / open. 
An open Session may or may not have an open Transaction * (so isTransactionAlive() may return false even if isSessionAlive() returns true). A Session may be reused for diff --git a/dspace-api/src/main/java/org/dspace/core/Utils.java b/dspace-api/src/main/java/org/dspace/core/Utils.java index 6831f45b5c51..3488ebd275ab 100644 --- a/dspace-api/src/main/java/org/dspace/core/Utils.java +++ b/dspace-api/src/main/java/org/dspace/core/Utils.java @@ -41,6 +41,9 @@ import org.apache.logging.log4j.Logger; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.hibernate.Session; +import org.hibernate.SessionFactory; +import org.hibernate.query.NativeQuery; /** * Utility functions for DSpace. @@ -523,4 +526,26 @@ public static String replaceLast(String input, String toReplace, String replacem return input.substring(0, lastIndex) + replacement + input.substring(lastIndex + toReplace.length()); } + + /** + * Get the current transaction's PID from PostgreSQL + + * @return PID of the current transaction + */ + public static Integer getTransactionPid(SessionFactory sessionFactory) { + Integer pid = -1; + try { + Session session = sessionFactory.getCurrentSession(); // Get the current session + String sql = "SELECT pg_backend_pid()"; // SQL query to get the PID + + // Execute the query and get the PID + NativeQuery query = session.createNativeQuery(sql); + pid = query.getSingleResult(); // Get the single result + + log.info("Current transaction PID: " + pid); // Optional logging + } catch (Exception e) { + log.error("Cannot get PID because: " + e.getMessage()); + } + return pid; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/StatelessAuthenticationFilter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/StatelessAuthenticationFilter.java index 964d35f42c34..6d2139b26849 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/StatelessAuthenticationFilter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/StatelessAuthenticationFilter.java @@ -99,7 +99,23 @@ protected void doFilterInternal(HttpServletRequest req, if (authentication != null) { SecurityContextHolder.getContext().setAuthentication(authentication); } - chain.doFilter(req, res); + + try { + chain.doFilter(req, res); + } finally { + // Complete the context to avoid transactions getting stuck in the connection pool in the + // `idle in transaction` state. + // TODO add the issue url + Context context = (Context) req.getAttribute(ContextUtil.DSPACE_CONTEXT); + // Ensure the context is cleared after the request is done + if (context != null && context.isValid()) { + try { + context.abort(); + } catch (Exception e) { + log.error("{} occurred while trying to close", e.getMessage(), e); + } + } + } } /** diff --git a/dspace/config/log4j2.xml b/dspace/config/log4j2.xml index a21cef9d6f3e..77bdbfe33ca9 100644 --- a/dspace/config/log4j2.xml +++ b/dspace/config/log4j2.xml @@ -29,9 +29,11 @@ > + "requestID" are not currently set in the ThreadContext. + Add this to the pattern to include the classId (hash) `%equals{%X{classID}}{}{unknown}`. 
+ --> + pattern='%d %t %-5p %equals{%X{correlationID}}{}{unknown} %equals{%X{requestID}}{}{unknown} %c @ %m%n'/> yyyy-MM-dd From 0b20909a666d2545e057e3b8c97eff772542d1e8 Mon Sep 17 00:00:00 2001 From: Paurikova2 <107862249+Paurikova2@users.noreply.github.com> Date: Thu, 23 Jan 2025 08:38:46 +0100 Subject: [PATCH 34/45] metadata-added-provenance (#730) * added provenance metadata for update, remove and add * added provenance metadata for update and remove bitstream * item test json added metadata provenance * control provenance only by tests of item * update and remove license * provenance access control and upload bitstream * provenance move item to different collection * provenance make item non-discoverable * provenance for mapped collection * don't write provenance for bitstream without item * move addProvenance to parent class * separated class for provenance * create class for provenance management * added metadata item and bitstream tests * tests for metadata provenance * problem with access control test * better service management: * delete clarin license mapping * added json to resources * modified provenance patch messages * messages bags * used correct json * log exception, replace ! by non conds, added doc comments * added logs for replacing mtd * make object from provenance service * removed interface from provenance msg provider * Refactored method `removeReadPolicies` - add provenance after removing policy * The message templates are loaded from the Enum instead of json file. * The `getMessage` method was overloaded. * Renamed `getItem` to `findItemByBitstream` to clarify the method's purpose. * Do not throw exception when adding to provenance * The exceptions are caught in the ProvenanceProvider and changed order when calling provenance methods. * Renamed ProvenanceProvider to ProvenanceService and refactored it to be used as Spring Service. 
* The ProvenanceService wasn't initialized in the BundleAccessControl * Removed the code without any effect * Removed the logging which is not required * Moved calling od the provenance service to better place. --------- Co-authored-by: milanmajchrak --- .../bulkaccesscontrol/BulkAccessControl.java | 5 + .../authorize/ResourcePolicyServiceImpl.java | 16 +- .../content/DSpaceObjectServiceImpl.java | 4 + .../org/dspace/content/ItemServiceImpl.java | 6 + .../factory/ContentServiceFactory.java | 9 + .../factory/ContentServiceFactoryImpl.java | 9 + .../core/ProvenanceMessageFormatter.java | 104 ++++ .../core/ProvenanceMessageTemplates.java | 38 ++ .../org/dspace/core/ProvenanceService.java | 177 +++++++ .../dspace/core/ProvenanceServiceImpl.java | 353 +++++++++++++ .../ClarinLicenseResourceMappingBuilder.java | 15 + .../app/rest/ItemAddBundleController.java | 5 + ...mOwningCollectionUpdateRestController.java | 4 + .../rest/MappedCollectionRestController.java | 7 +- .../rest/repository/BundleRestRepository.java | 5 + .../operation/BitstreamRemoveOperation.java | 11 +- .../DSpaceObjectMetadataAddOperation.java | 14 +- .../DSpaceObjectMetadataRemoveOperation.java | 13 +- .../DSpaceObjectMetadataReplaceOperation.java | 22 +- .../ItemDiscoverableReplaceOperation.java | 6 + .../dspace/app/rest/ItemRestRepositoryIT.java | 3 +- .../app/rest/ProvenanceExpectedMessages.java | 50 ++ .../dspace/app/rest/ProvenanceServiceIT.java | 499 ++++++++++++++++++ .../app/rest/test/MetadataPatchSuite.java | 47 +- .../rest/test/item-metadata-patch-suite.json | 219 ++++++++ dspace/config/spring/api/core-services.xml | 2 + 26 files changed, 1621 insertions(+), 22 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/core/ProvenanceMessageFormatter.java create mode 100644 dspace-api/src/main/java/org/dspace/core/ProvenanceMessageTemplates.java create mode 100644 dspace-api/src/main/java/org/dspace/core/ProvenanceService.java create mode 100644 
dspace-api/src/main/java/org/dspace/core/ProvenanceServiceImpl.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/ProvenanceExpectedMessages.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/ProvenanceServiceIT.java create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/test/item-metadata-patch-suite.json diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java index 7bef232f0450..bfb8b24f33e2 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -55,6 +55,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.core.ProvenanceService; import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchServiceException; @@ -111,6 +112,8 @@ public class BulkAccessControl extends DSpaceRunnable resPolicies = resourcePolicyService.find(c, o, type); + resourcePolicyDAO.deleteByDsoAndTypeAndAction(c, o, type, action); c.turnOffAuthorisationSystem(); contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o); c.restoreAuthSystemState(); + + provenanceService.removeReadPolicies(c, o, resPolicies); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java index 2119959073f0..a3d2316d0d05 100644 --- a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java @@ -34,6 +34,7 @@ import org.dspace.content.service.RelationshipService; import org.dspace.core.Constants; import org.dspace.core.Context; 
+import org.dspace.core.ProvenanceService; import org.dspace.handle.service.HandleService; import org.dspace.identifier.service.IdentifierService; import org.dspace.utils.DSpace; @@ -67,6 +68,8 @@ public abstract class DSpaceObjectServiceImpl implements protected MetadataAuthorityService metadataAuthorityService; @Autowired(required = true) protected RelationshipService relationshipService; + @Autowired(required = true) + protected ProvenanceService provenanceService; public DSpaceObjectServiceImpl() { @@ -377,6 +380,7 @@ public void clearMetadata(Context context, T dso, String schema, String element, } } dso.setMetadataModified(); + provenanceService.removeMetadata(context, dso, schema, element, qualifier); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index e135f614ec4f..fbdd7b97dcca 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -53,6 +53,7 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; +import org.dspace.core.ProvenanceService; import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.SearchService; @@ -174,6 +175,10 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) ClarinMatomoBitstreamTracker matomoBitstreamTracker; + @Autowired(required = true) + private ProvenanceService provenanceService; + + protected ItemServiceImpl() { super(); } @@ -1134,6 +1139,7 @@ public void move(Context context, Item item, Collection from, Collection to, boo context.addEvent(new Event(Event.MODIFY, Constants.ITEM, item.getID(), null, getIdentifiers(context, item))); } + provenanceService.moveItem(context, item, from); } @Override diff --git 
a/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactory.java b/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactory.java index dbe842a4194f..2e2798f49e84 100644 --- a/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactory.java +++ b/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactory.java @@ -34,6 +34,7 @@ import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.SiteService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.ProvenanceService; import org.dspace.eperson.service.SubscribeService; import org.dspace.handle.service.HandleClarinService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -77,6 +78,7 @@ public abstract class ContentServiceFactory { public abstract SiteService getSiteService(); public abstract SubscribeService getSubscribeService(); + public abstract PreviewContentService getPreviewContentService(); /** @@ -123,6 +125,13 @@ public abstract class ContentServiceFactory { */ public abstract HandleClarinService getHandleClarinService(); + /** + * Return the implementation of the ProvenanceService interface + * + * @return the ProvenanceService + */ + public abstract ProvenanceService getProvenanceService(); + public InProgressSubmissionService getInProgressSubmissionService(InProgressSubmission inProgressSubmission) { if (inProgressSubmission instanceof WorkspaceItem) { return getWorkspaceItemService(); diff --git a/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactoryImpl.java index a38dec0c0a9d..7b340cafd0c9 100644 --- a/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactoryImpl.java @@ -31,6 +31,7 @@ import org.dspace.content.service.RelationshipTypeService; import 
org.dspace.content.service.SiteService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.ProvenanceService; import org.dspace.eperson.service.SubscribeService; import org.dspace.handle.service.HandleClarinService; import org.springframework.beans.factory.annotation.Autowired; @@ -93,6 +94,9 @@ public class ContentServiceFactoryImpl extends ContentServiceFactory { @Autowired(required = true) private HandleClarinService handleClarinService; + @Autowired(required = true) + private ProvenanceService provenanceService; + @Override public List> getDSpaceObjectServices() { return dSpaceObjectServices; @@ -173,6 +177,11 @@ public PreviewContentService getPreviewContentService() { return previewContentService; } + @Override + public ProvenanceService getProvenanceService() { + return provenanceService; + } + @Override public RelationshipTypeService getRelationshipTypeService() { return relationshipTypeService; diff --git a/dspace-api/src/main/java/org/dspace/core/ProvenanceMessageFormatter.java b/dspace-api/src/main/java/org/dspace/core/ProvenanceMessageFormatter.java new file mode 100644 index 000000000000..a6f486b34c99 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/core/ProvenanceMessageFormatter.java @@ -0,0 +1,104 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +import java.sql.SQLException; +import java.util.List; +import java.util.stream.Collectors; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.DCDate; +import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.factory.ContentServiceFactory; +import 
org.dspace.content.service.InstallItemService; +import org.dspace.eperson.EPerson; + +/** + * The ProvenanceMessageProvider providing methods to generate provenance messages for DSpace items. + * It loads message templates + * from a JSON file and formats messages based on the context, including user details and timestamps. + * + * @author Michaela Paurikova (dspace at dataquest.sk) + */ +public class ProvenanceMessageFormatter { + private InstallItemService installItemService; + + public ProvenanceMessageFormatter() {} + + public String getMessage(Context context, String messageTemplate, Item item, Object... args) + throws SQLException, AuthorizeException { + // Initialize InstallItemService if it is not initialized. + if (installItemService == null) { + installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + } + String msg = getMessage(context, messageTemplate, args); + msg = msg + "\n" + installItemService.getBitstreamProvenanceMessage(context, item); + return msg; + } + + public String getMessage(Context context, String messageTemplate, Object... 
args) { + EPerson currentUser = context.getCurrentUser(); + String timestamp = DCDate.getCurrent().toString(); + String details = validateMessageTemplate(messageTemplate, args); + return String.format("%s by %s (%s) on %s", + details, + currentUser.getFullName(), + currentUser.getEmail(), + timestamp); + } + + public String getMessage(Item item) { + String msg = "Item was in collections:\n"; + List collsList = item.getCollections(); + for (Collection coll : collsList) { + msg = msg + coll.getName() + " (ID: " + coll.getID() + ")\n"; + } + return msg; + } + + public String getMessage(Bitstream bitstream) { + // values of deleted bitstream + String msg = bitstream.getName() + ": " + + bitstream.getSizeBytes() + " bytes, checksum: " + + bitstream.getChecksum() + " (" + + bitstream.getChecksumAlgorithm() + ")\n"; + return msg; + } + + public String getMessage(List resPolicies) { + return resPolicies.stream() + .filter(rp -> rp.getAction() == Constants.READ) + .map(rp -> String.format("[%s, %s, %d, %s, %s, %s, %s]", + rp.getRpName(), rp.getRpType(), rp.getAction(), + rp.getEPerson() != null ? rp.getEPerson().getEmail() : null, + rp.getGroup() != null ? rp.getGroup().getName() : null, + rp.getStartDate() != null ? rp.getStartDate().toString() : null, + rp.getEndDate() != null ? rp.getEndDate().toString() : null)) + .collect(Collectors.joining(";")); + } + + public String getMetadata(String oldMtdKey, String oldMtdValue) { + return oldMtdKey + ": " + oldMtdValue; + } + + public String getMetadataField(MetadataField metadataField) { + return metadataField.toString() + .replace('_', '.'); + } + + private String validateMessageTemplate(String messageTemplate, Object... 
args) { + if (messageTemplate == null) { + throw new IllegalArgumentException("The provenance message template is null!"); + } + return String.format(messageTemplate, args); + } +} diff --git a/dspace-api/src/main/java/org/dspace/core/ProvenanceMessageTemplates.java b/dspace-api/src/main/java/org/dspace/core/ProvenanceMessageTemplates.java new file mode 100644 index 000000000000..c14d3f447591 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/core/ProvenanceMessageTemplates.java @@ -0,0 +1,38 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +/** + * The ProvenanceMessageTemplates enum provides message templates for provenance messages. + * + * @author Michaela Paurikova (dspace at dataquest.sk) + */ +public enum ProvenanceMessageTemplates { + ACCESS_CONDITION("Access condition (%s) was added to %s (%s)"), + RESOURCE_POLICIES_REMOVED("Resource policies (%s) of %s (%s) were removed"), + BUNDLE_ADDED("Item was added bitstream to bundle (%s)"), + EDIT_LICENSE("License (%s) was %s"), + MOVE_ITEM("Item was moved from collection (%s) to different collection"), + MAPPED_ITEM("Item was mapped to collection (%s)"), + DELETED_ITEM_FROM_MAPPED("Item was deleted from mapped collection (%s)"), + EDIT_BITSTREAM("Item (%s) was deleted bitstream (%s)"), + ITEM_METADATA("Item metadata (%s) was %s"), + BITSTREAM_METADATA("Item metadata (%s) was %s bitstream (%s)"), + ITEM_REPLACE_SINGLE_METADATA("Item bitstream (%s) metadata (%s) was updated"), + DISCOVERABLE("Item was made %sdiscoverable"); + + private final String template; + + ProvenanceMessageTemplates(String template) { + this.template = template; + } + + public String getTemplate() { + return template; + } +} diff --git a/dspace-api/src/main/java/org/dspace/core/ProvenanceService.java 
b/dspace-api/src/main/java/org/dspace/core/ProvenanceService.java new file mode 100644 index 000000000000..9f34d89f9214 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/core/ProvenanceService.java @@ -0,0 +1,177 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +import java.util.List; + +import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; + +/** + * The ProvenanceService is responsible for creating provenance metadata for items based on the actions performed. 
+ * + * @author Milan Majchrak (dspace at dataquest.sk) + */ +public interface ProvenanceService { + /** + * Add a provenance message to the item when a new access condition is added + * + * @param context DSpace context object + * @param item item to which the access condition is added + * @param accessControl the access control input + */ + void setItemPolicies(Context context, Item item, BulkAccessControlInput accessControl); + + /** + * Add a provenance message to the item when a read policy is removed + * + * @param context DSpace context object + * @param dso DSpace object from which the read policy is removed + * @param resPolicies list of resource policies that are removed + */ + void removeReadPolicies(Context context, DSpaceObject dso, List resPolicies); + + /** + * Add a provenance message to the item when a bitstream policy is set + * + * @param context DSpace context object + * @param bitstream bitstream to which the policy is set + * @param item item to which the bitstream belongs + * @param accessControl the access control input + */ + void setBitstreamPolicies(Context context, Bitstream bitstream, Item item, + BulkAccessControlInput accessControl); + + /** + * Add a provenance message to the item when an item's license is edited + * + * @param context DSpace context object + * @param item item to which the license is edited + * @param newLicense true if the license is new, false if it's edited + */ + void updateLicense(Context context, Item item, boolean newLicense); + + /** + * Add a provenance message to the item when it's moved to a collection + * + * @param context DSpace context object + * @param item item that is moved + * @param collection collection to which the item is moved + */ + void moveItem(Context context, Item item, Collection collection); + + /** + * Add a provenance message to the item when it's mapped to a collection + * + * @param context DSpace context object + * @param item item that is mapped + * @param collection collection 
to which the item is mapped + */ + void mappedItem(Context context, Item item, Collection collection); + + /** + * Add a provenance message to the item when it's deleted from a mapped collection + * + * @param context DSpace context object + * @param item item that is deleted from a mapped collection + * @param collection collection from which the item is deleted + */ + void deletedItemFromMapped(Context context, Item item, Collection collection); + + /** + * Add a provenance message to the item when it's bitstream is deleted + * + * @param context DSpace context object + * @param bitstream deleted bitstream + * @param item item from which the bitstream is deleted + */ + void deleteBitstream(Context context, Bitstream bitstream, Item item); + + /** + * Add a provenance message to the item when metadata is added + * + * @param context DSpace context object + * @param dso DSpace object to which the metadata is added + * @param metadataField metadata field that is added + */ + void addMetadata(Context context, DSpaceObject dso, MetadataField metadataField); + + /** + * Add a provenance message to the item when metadata is removed + * + * @param context DSpace context object + * @param dso DSpace object from which the metadata is removed + */ + void removeMetadata(Context context, DSpaceObject dso, String schema, String element, String qualifier); + + /** + * Add a provenance message to the item when metadata is removed at a given index + * + * @param context DSpace context object + * @param dso DSpace object from which the metadata is removed + * @param metadataValues list of metadata values + * @param indexInt index at which the metadata is removed + */ + void removeMetadataAtIndex(Context context, DSpaceObject dso, List metadataValues, + int indexInt); + + /** + * Add a provenance message to the item when metadata is replaced + * + * @param context DSpace context object + * @param dso DSpace object to which the metadata is replaced + * @param metadataField metadata 
field that is replaced + * @param oldMtdVal old metadata value + */ + void replaceMetadata(Context context, DSpaceObject dso, MetadataField metadataField, String oldMtdVal); + + /** + * Add a provenance message to the item when metadata is replaced + * + * @param context DSpace context object + * @param dso DSpace object to which the metadata is replaced + * @param metadataField metadata field that is replaced + * @param oldMtdVal old metadata value + */ + void replaceMetadataSingle(Context context, DSpaceObject dso, MetadataField metadataField, + String oldMtdVal); + + /** + * Add a provenance message to the item when metadata is updated + * + * @param context DSpace context object + * @param item item to which the metadata is updated + * @param discoverable true if the item is discoverable, false if it's not + */ + void makeDiscoverable(Context context, Item item, boolean discoverable); + + /** + * Add a provenance message to the item when a bitstream is uploaded + * + * @param context DSpace context object + * @param bundle bundle to which the bitstream is uploaded + */ + void uploadBitstream(Context context, Bundle bundle); + + /** + * Fetch an Item object using a service and return the first Item object from the list. 
+ * Log an error if the list is empty or if there is an SQL error + * + * @param context DSpace context object + * @param bitstream bitstream to which the item is fetched + */ + Item findItemByBitstream(Context context, Bitstream bitstream); + +} diff --git a/dspace-api/src/main/java/org/dspace/core/ProvenanceServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/ProvenanceServiceImpl.java new file mode 100644 index 000000000000..71b7128e5565 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/core/ProvenanceServiceImpl.java @@ -0,0 +1,353 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.app.bulkaccesscontrol.model.AccessCondition; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.MetadataValue; +import org.dspace.content.clarin.ClarinLicenseResourceMapping; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.clarin.ClarinItemService; +import org.dspace.content.service.clarin.ClarinLicenseResourceMappingService; +import 
org.springframework.beans.factory.annotation.Autowired; + +/** + * ProvenanceServiceImpl is an implementation of ProvenanceService. + * + * @author Michaela Paurikova (dspace at dataquest.sk) + */ +public class ProvenanceServiceImpl implements ProvenanceService { + private static final Logger log = LogManager.getLogger(ProvenanceServiceImpl.class); + + @Autowired + private ItemService itemService; + @Autowired + private ClarinItemService clarinItemService; + @Autowired + private ClarinLicenseResourceMappingService clarinResourceMappingService; + @Autowired + private BitstreamService bitstreamService; + + private final ProvenanceMessageFormatter messageProvider = new ProvenanceMessageFormatter(); + + public void setItemPolicies(Context context, Item item, BulkAccessControlInput accessControl) { + String resPoliciesStr = extractAccessConditions(accessControl.getItem().getAccessConditions()); + if (StringUtils.isNotBlank(resPoliciesStr)) { + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.ACCESS_CONDITION.getTemplate(), + resPoliciesStr, "item", item.getID()); + try { + addProvenanceMetadata(context, item, msg); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when setting item policies.", e); + } + } + } + + public void removeReadPolicies(Context context, DSpaceObject dso, List resPolicies) { + if (resPolicies.isEmpty()) { + return; + } + String resPoliciesStr = messageProvider.getMessage(resPolicies); + try { + if (dso.getType() == Constants.ITEM) { + Item item = (Item) dso; + String msg = messageProvider.getMessage(context, + ProvenanceMessageTemplates.RESOURCE_POLICIES_REMOVED.getTemplate(), + resPoliciesStr.isEmpty() ? 
"empty" : resPoliciesStr, "item", item.getID()); + addProvenanceMetadata(context, item, msg); + } else if (dso.getType() == Constants.BITSTREAM) { + Bitstream bitstream = (Bitstream) dso; + Item item = findItemByBitstream(context, bitstream); + if (Objects.nonNull(item)) { + String msg = messageProvider.getMessage(context, + ProvenanceMessageTemplates.RESOURCE_POLICIES_REMOVED.getTemplate(), + resPoliciesStr.isEmpty() ? "empty" : resPoliciesStr, "bitstream", bitstream.getID()); + addProvenanceMetadata(context, item, msg); + } + } + } catch (SQLException | AuthorizeException e) { + log.error("Unable to remove read policies from the DSpace object.", e); + } + } + + public void setBitstreamPolicies(Context context, Bitstream bitstream, Item item, + BulkAccessControlInput accessControl) { + String accConditionsStr = extractAccessConditions(accessControl.getBitstream().getAccessConditions()); + if (StringUtils.isNotBlank(accConditionsStr)) { + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.ACCESS_CONDITION.getTemplate(), + accConditionsStr, "bitstream", bitstream.getID()); + try { + addProvenanceMetadata(context, item, msg); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when setting bitstream policies.", e); + } + } + } + + public void updateLicense(Context context, Item item, boolean newLicense) { + String oldLicense = null; + + try { + oldLicense = findLicenseInBundles(item, Constants.LICENSE_BUNDLE_NAME, oldLicense, context); + if (oldLicense == null) { + oldLicense = findLicenseInBundles(item, Constants.CONTENT_BUNDLE_NAME, oldLicense, context); + } + + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.EDIT_LICENSE.getTemplate(), + item, Objects.isNull(oldLicense) ? "empty" : oldLicense, + !newLicense ? "removed" : Objects.isNull(oldLicense) ? 
"added" : "updated"); + addProvenanceMetadata(context, item, msg); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when editing Item's license.", e); + } + + } + + public void moveItem(Context context, Item item, Collection collection) { + try { + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.MOVE_ITEM.getTemplate(), + item, collection.getID()); + // Update item in DB + // Because a user can move an item without authorization turn off authorization + context.turnOffAuthorisationSystem(); + addProvenanceMetadata(context, item, msg); + context.restoreAuthSystemState(); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when moving an item to a different collection.", + e); + } + } + + public void mappedItem(Context context, Item item, Collection collection) { + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.MAPPED_ITEM.getTemplate(), + collection.getID()); + try { + addProvenanceMetadata(context, item, msg); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when mapping an item into a collection.", e); + } + } + + public void deletedItemFromMapped(Context context, Item item, Collection collection) { + String msg = messageProvider.getMessage(context, + ProvenanceMessageTemplates.DELETED_ITEM_FROM_MAPPED.getTemplate(), collection.getID()); + try { + addProvenanceMetadata(context, item, msg); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when deleting an item from a mapped collection.", + e); + } + } + + public void deleteBitstream(Context context, Bitstream bitstream, Item item) { + try { + if (Objects.nonNull(item)) { + String msg = messageProvider.getMessage(context, + ProvenanceMessageTemplates.EDIT_BITSTREAM.getTemplate(), item, item.getID(), + messageProvider.getMessage(bitstream)); + 
addProvenanceMetadata(context, item, msg); + } + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when deleting a bitstream.", e); + } + } + + public void addMetadata(Context context, DSpaceObject dso, MetadataField metadataField) { + try { + if (Constants.ITEM == dso.getType()) { + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.ITEM_METADATA.getTemplate(), + messageProvider.getMetadataField(metadataField), "added"); + addProvenanceMetadata(context, (Item) dso, msg); + } + + if (dso.getType() == Constants.BITSTREAM) { + Bitstream bitstream = (Bitstream) dso; + Item item = findItemByBitstream(context, bitstream); + if (Objects.nonNull(item)) { + String msg = messageProvider.getMessage(context, + ProvenanceMessageTemplates.BITSTREAM_METADATA.getTemplate(), item, + messageProvider.getMetadataField(metadataField), "added by", + messageProvider.getMessage(bitstream)); + addProvenanceMetadata(context, item, msg); + } + } + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when adding metadata to a DSpace object.", e); + } + } + + public void removeMetadata(Context context, DSpaceObject dso, String schema, String element, String qualifier) { + if (dso.getType() != Constants.BITSTREAM) { + return; + } + MetadataField oldMtdKey = null; + String oldMtdValue = null; + List mtd = bitstreamService.getMetadata((Bitstream) dso, schema, element, qualifier, Item.ANY); + if (CollectionUtils.isEmpty(mtd)) { + // Do not add any provenance message when there are no metadata to remove + return; + } + oldMtdKey = mtd.get(0).getMetadataField(); + oldMtdValue = mtd.get(0).getValue(); + Bitstream bitstream = (Bitstream) dso; + try { + Item item = findItemByBitstream(context, bitstream); + if (Objects.nonNull(item)) { + String msg = messageProvider.getMessage(context, + ProvenanceMessageTemplates.BITSTREAM_METADATA.getTemplate(), item, + 
messageProvider.getMetadata(messageProvider.getMetadataField(oldMtdKey), oldMtdValue), + "deleted from", messageProvider.getMessage(bitstream)); + addProvenanceMetadata(context, item, msg); + } + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when removing metadata from a dso.", e); + } + + } + + public void removeMetadataAtIndex(Context context, DSpaceObject dso, List metadataValues, + int indexInt) { + if (dso.getType() != Constants.ITEM) { + return; + } + // Remember removed mtd + String oldMtdKey = messageProvider.getMetadataField(metadataValues.get(indexInt).getMetadataField()); + String oldMtdValue = metadataValues.get(indexInt).getValue(); + try { + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.ITEM_METADATA.getTemplate(), + (Item) dso, messageProvider.getMetadata(oldMtdKey, oldMtdValue), "deleted"); + addProvenanceMetadata(context, (Item) dso, msg); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when removing metadata at a specific index " + + "from a dso", e); + } + } + + public void replaceMetadata(Context context, DSpaceObject dso, MetadataField metadataField, String oldMtdVal) { + if (dso.getType() != Constants.ITEM) { + return; + } + try { + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.ITEM_METADATA.getTemplate(), + (Item) dso,messageProvider.getMetadata(messageProvider.getMetadataField(metadataField), + oldMtdVal), "updated"); + addProvenanceMetadata(context, (Item) dso, msg); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when replacing metadata in a dso.", e); + } + + } + + public void replaceMetadataSingle(Context context, DSpaceObject dso, MetadataField metadataField, + String oldMtdVal) { + if (dso.getType() != Constants.BITSTREAM) { + return; + } + + Bitstream bitstream = (Bitstream) dso; + try { + Item item = 
findItemByBitstream(context, bitstream); + if (Objects.nonNull(item)) { + String msg = messageProvider.getMessage(context, + ProvenanceMessageTemplates.ITEM_REPLACE_SINGLE_METADATA.getTemplate(), item, + messageProvider.getMessage(bitstream), + messageProvider.getMetadata(messageProvider.getMetadataField(metadataField), oldMtdVal)); + addProvenanceMetadata(context, item, msg);; + } + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when replacing metadata in a item.", e); + } + } + + public void makeDiscoverable(Context context, Item item, boolean discoverable) { + try { + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.DISCOVERABLE.getTemplate(), + item, discoverable ? "" : "non-") + messageProvider.getMessage(item); + addProvenanceMetadata(context, item, msg); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when making an item discoverable.", e); + } + } + + public void uploadBitstream(Context context, Bundle bundle) { + Item item = bundle.getItems().get(0); + try { + String msg = messageProvider.getMessage(context, ProvenanceMessageTemplates.BUNDLE_ADDED.getTemplate(), + item, bundle.getID()); + addProvenanceMetadata(context,item, msg); + itemService.update(context, item); + } catch (SQLException | AuthorizeException e) { + log.error("Unable to add new provenance metadata when updating an item's bitstream.", e); + } + } + + private void addProvenanceMetadata(Context context, Item item, String msg) + throws SQLException, AuthorizeException { + itemService.addMetadata(context, item, MetadataSchemaEnum.DC.getName(), + "description", "provenance", "en", msg); + itemService.update(context, item); + } + + private String extractAccessConditions(List accessConditions) { + return accessConditions.stream() + .map(AccessCondition::getName) + .collect(Collectors.joining(";")); + } + + public Item findItemByBitstream(Context context, Bitstream 
bitstream) { + List items = null; + try { + items = clarinItemService.findByBitstreamUUID(context, bitstream.getID()); + } catch (SQLException e) { + log.error("Unable to find item by bitstream (" + bitstream.getID() + " ).", e); + return null; + } + if (items.isEmpty()) { + log.warn("Bitstream (" + bitstream.getID() + ") is not assigned to any item."); + return null; + } + return items.get(0); + } + + private String findLicenseInBundles(Item item, String bundleName, String currentLicense, Context context) + throws SQLException { + List bundles = item.getBundles(bundleName); + for (Bundle clarinBundle : bundles) { + List bitstreamList = clarinBundle.getBitstreams(); + for (Bitstream bundleBitstream : bitstreamList) { + if (Objects.isNull(currentLicense)) { + List mappings = + this.clarinResourceMappingService.findByBitstreamUUID(context, bundleBitstream.getID()); + if (CollectionUtils.isNotEmpty(mappings)) { + return mappings.get(0).getLicense().getName(); + } + } + } + } + return currentLicense; + } +} diff --git a/dspace-api/src/test/java/org/dspace/builder/ClarinLicenseResourceMappingBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ClarinLicenseResourceMappingBuilder.java index 4a39a44fd4b1..c2bd6ab77831 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ClarinLicenseResourceMappingBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ClarinLicenseResourceMappingBuilder.java @@ -8,6 +8,7 @@ package org.dspace.builder; import java.sql.SQLException; +import java.util.Objects; import org.dspace.authorize.AuthorizeException; import org.dspace.content.clarin.ClarinLicenseResourceMapping; @@ -37,6 +38,20 @@ private ClarinLicenseResourceMappingBuilder create(final Context context) { return this; } + public static void delete(Integer id) throws Exception { + if (Objects.isNull(id)) { + return; + } + try (Context c = new Context()) { + ClarinLicenseResourceMapping clarinLicense = clarinLicenseResourceMappingService.find(c, id); + + if 
(clarinLicense != null) { + clarinLicenseResourceMappingService.delete(c, clarinLicense); + } + c.complete(); + } + } + @Override public void cleanup() throws Exception { try (Context c = new Context()) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemAddBundleController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemAddBundleController.java index b3444a739e77..74f28bbfd19c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemAddBundleController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemAddBundleController.java @@ -38,6 +38,7 @@ import org.dspace.content.service.clarin.ClarinLicenseService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.core.ProvenanceService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -98,6 +99,9 @@ public class ItemAddBundleController { @Autowired ClarinLicenseResourceMappingService clarinLicenseResourceMappingService; + @Autowired + ProvenanceService provenanceService; + /** * Method to add a Bundle to an Item with the given UUID in the URL. This will create a Bundle with the * name provided in the request and attach this to the Item that matches the UUID in the URL. 
@@ -183,6 +187,7 @@ public ItemRest updateLicenseForBundle(@PathVariable UUID uuid, } itemService.update(context, item); + provenanceService.updateLicense(context, item, !Objects.isNull(clarinLicense)); context.commit(); return converter.toRest(item, utils.obtainProjection()); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java index b5a0c957f265..109b79c86111 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemOwningCollectionUpdateRestController.java @@ -32,6 +32,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.core.ProvenanceService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PostAuthorize; @@ -65,6 +66,9 @@ public class ItemOwningCollectionUpdateRestController { @Autowired Utils utils; + @Autowired + ProvenanceService provenanceService; + /** * This method will update the owning collection of the item that correspond to the provided item uuid, effectively * moving the item to the new collection. 
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/MappedCollectionRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/MappedCollectionRestController.java index 14dae21ebec0..09581f5e2998 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/MappedCollectionRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/MappedCollectionRestController.java @@ -30,6 +30,7 @@ import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.core.ProvenanceService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.PathVariable; @@ -57,6 +58,9 @@ public class MappedCollectionRestController { @Autowired Utils utils; + @Autowired + ProvenanceService provenanceService; + /** * This method will add an Item to a Collection. The Collection object is encapsulated in the request due to the * text/uri-list consumer and the Item UUID comes from the path in the URL @@ -105,6 +109,7 @@ public void createCollectionToItemRelation(@PathVariable UUID uuid, collectionService.addItem(context, collectionToMapTo, item); collectionService.update(context, collectionToMapTo); itemService.update(context, item); + provenanceService.mappedItem(context, item, collectionToMapTo); } else { throw new UnprocessableEntityException("Not a valid collection or item uuid."); } @@ -151,12 +156,12 @@ public void deleteCollectionToItemRelation(@PathVariable UUID uuid, @PathVariabl collectionService.removeItem(context, collection, item); collectionService.update(context, collection); itemService.update(context, item); + provenanceService.deletedItemFromMapped(context,item, collection); context.commit(); } } else { throw new UnprocessableEntityException("Not a valid collection or item uuid."); } - } private void checkIfItemIsTemplate(Item item) { diff 
--git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundleRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundleRestRepository.java index f750743db66e..7f800ea81813 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundleRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BundleRestRepository.java @@ -35,6 +35,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.core.ProvenanceService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -69,6 +70,9 @@ public class BundleRestRepository extends DSpaceObjectRestRepository { BitstreamService bitstreamService; @Autowired AuthorizeService authorizeService; + @Autowired + ProvenanceService provenanceService; public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; @Override @@ -53,9 +57,14 @@ public Bitstream perform(Context context, Bitstream resource, Operation operatio throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete); } authorizeBitstreamRemoveAction(context, bitstreamToDelete, Constants.DELETE); - try { + // Find the item to which the bitstream belongs before deleting the bitstream, + // because after deletion, the item will no longer be connected to the bitstream. 
+ Item item = provenanceService.findItemByBitstream(context, bitstreamToDelete); + // Delete the bitstream bitstreamService.delete(context, bitstreamToDelete); + // Update the provenance metadata after the bitstream has been successfully deleted + provenanceService.deleteBitstream(context, bitstreamToDelete, item); } catch (AuthorizeException | IOException e) { throw new RuntimeException(e.getMessage(), e); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataAddOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataAddOperation.java index 4b27ae963ab0..4f343019942b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataAddOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataAddOperation.java @@ -17,6 +17,9 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; +import org.dspace.core.ProvenanceService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -34,8 +37,11 @@ @Component public class DSpaceObjectMetadataAddOperation extends PatchOperation { + private static final Logger log = LoggerFactory.getLogger(DSpaceObjectMetadataAddOperation.class); @Autowired DSpaceObjectMetadataPatchUtils metadataPatchUtils; + @Autowired + ProvenanceService provenanceService; @Override public R perform(Context context, R resource, Operation operation) throws SQLException { @@ -69,9 +75,13 @@ private void add(Context context, DSpaceObject dso, DSpaceObjectService dsoServi dsoService.addAndShiftRightMetadata(context, dso, metadataField.getMetadataSchema().getName(), metadataField.getElement(), 
metadataField.getQualifier(), metadataValue.getLanguage(), metadataValue.getValue(), metadataValue.getAuthority(), metadataValue.getConfidence(), indexInt); + provenanceService.addMetadata(context, dso, metadataField); } catch (SQLException e) { - throw new DSpaceBadRequestException("SQLException in DspaceObjectMetadataAddOperation.add trying to add " + - "metadata to dso.", e); + String msg; + msg = "SQLException in DspaceObjectMetadataAddOperation.add trying to add " + + "metadata to dso."; + log.error(msg, e); + throw new DSpaceBadRequestException(msg, e); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataRemoveOperation.java index 3164ae377aeb..2d09150233e3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataRemoveOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataRemoveOperation.java @@ -21,6 +21,9 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; +import org.dspace.core.ProvenanceService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -41,8 +44,11 @@ @Component public class DSpaceObjectMetadataRemoveOperation extends PatchOperation { + private static final Logger log = LoggerFactory.getLogger(DSpaceObjectMetadataRemoveOperation.class); @Autowired DSpaceObjectMetadataPatchUtils metadataPatchUtils; + @Autowired + ProvenanceService provenanceService; @Override public R perform(Context context, R resource, Operation operation) throws SQLException { @@ -82,6 +88,7 @@ private void remove(Context context, DSpaceObject dso, 
DSpaceObjectService dsoSe // remove that metadata dsoService.removeMetadataValues(context, dso, Arrays.asList(metadataValues.get(indexInt))); + provenanceService.removeMetadataAtIndex(context, dso, metadataValues, indexInt); } else { throw new UnprocessableEntityException("UnprocessableEntityException - There is no metadata of " + "this type at that index"); @@ -91,9 +98,9 @@ private void remove(Context context, DSpaceObject dso, DSpaceObjectService dsoSe throw new IllegalArgumentException("This index (" + index + ") is not valid number.", e); } catch (ArrayIndexOutOfBoundsException e) { throw new UnprocessableEntityException("There is no metadata of this type at that index"); - } catch (SQLException e) { - throw new DSpaceBadRequestException("SQLException in DspaceObjectMetadataRemoveOperation.remove " + - "trying to remove metadata from dso.", e); + } catch (SQLException ex) { + throw new DSpaceBadRequestException("SQLException in DspaceObjectMetadataRemoveOperation.remove" + + " trying to remove metadata from dso.", ex); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataReplaceOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataReplaceOperation.java index 1cf15684587b..129c99a3c9cd 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataReplaceOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/DSpaceObjectMetadataReplaceOperation.java @@ -21,6 +21,9 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; +import org.dspace.core.ProvenanceService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -38,9 +41,11 
@@ */ @Component public class DSpaceObjectMetadataReplaceOperation extends PatchOperation { - + private static final Logger log = LoggerFactory.getLogger(DSpaceObjectMetadataReplaceOperation.class); @Autowired DSpaceObjectMetadataPatchUtils metadataPatchUtils; + @Autowired + ProvenanceService provenanceService; @Override public R perform(Context context, R resource, Operation operation) throws SQLException { @@ -91,11 +96,12 @@ private void replace(Context context, DSpaceObject dso, DSpaceObjectService dsoS } // replace single existing metadata value if (propertyOfMd == null) { - this.replaceSingleMetadataValue(dso, dsoService, metadataField, metadataValue, index); + this.replaceSingleMetadataValue(context, dso, dsoService, metadataField, metadataValue, index); return; } // replace single property of exiting metadata value - this.replaceSinglePropertyOfMdValue(dso, dsoService, metadataField, index, propertyOfMd, valueMdProperty); + this.replaceSinglePropertyOfMdValue(context, dso, dsoService, metadataField, + index, propertyOfMd, valueMdProperty); } /** @@ -145,9 +151,10 @@ private void replaceMetadataFieldMetadata(Context context, DSpaceObject dso, DSp * @param index index of md being replaced */ // replace single existing metadata value - private void replaceSingleMetadataValue(DSpaceObject dso, DSpaceObjectService dsoService, + private void replaceSingleMetadataValue(Context context, DSpaceObject dso, DSpaceObjectService dsoService, MetadataField metadataField, MetadataValueRest metadataValue, String index) { + String msg; try { List metadataValues = dsoService.getMetadata(dso, metadataField.getMetadataSchema().getName(), metadataField.getElement(), @@ -157,11 +164,13 @@ private void replaceSingleMetadataValue(DSpaceObject dso, DSpaceObjectService ds && metadataValues.get(indexInt) != null) { // Alter this existing md MetadataValue existingMdv = metadataValues.get(indexInt); + String oldMtdVal = existingMdv.getValue(); 
existingMdv.setAuthority(metadataValue.getAuthority()); existingMdv.setConfidence(metadataValue.getConfidence()); existingMdv.setLanguage(metadataValue.getLanguage()); existingMdv.setValue(metadataValue.getValue()); dsoService.setMetadataModified(dso); + provenanceService.replaceMetadata(context, dso, metadataField, oldMtdVal); } else { throw new UnprocessableEntityException("There is no metadata of this type at that index"); } @@ -179,7 +188,7 @@ private void replaceSingleMetadataValue(DSpaceObject dso, DSpaceObjectService ds * @param propertyOfMd property of md being replaced * @param valueMdProperty new value of property of md being replaced */ - private void replaceSinglePropertyOfMdValue(DSpaceObject dso, DSpaceObjectService dsoService, + private void replaceSinglePropertyOfMdValue(Context context, DSpaceObject dso, DSpaceObjectService dsoService, MetadataField metadataField, String index, String propertyOfMd, String valueMdProperty) { try { @@ -190,6 +199,8 @@ private void replaceSinglePropertyOfMdValue(DSpaceObject dso, DSpaceObjectServic if (indexInt >= 0 && metadataValues.size() > indexInt && metadataValues.get(indexInt) != null) { // Alter only asked propertyOfMd MetadataValue existingMdv = metadataValues.get(indexInt); + String oldMtdVal = existingMdv.getValue(); + if (propertyOfMd.equals("authority")) { existingMdv.setAuthority(valueMdProperty); } @@ -203,6 +214,7 @@ private void replaceSinglePropertyOfMdValue(DSpaceObject dso, DSpaceObjectServic existingMdv.setValue(valueMdProperty); } dsoService.setMetadataModified(dso); + provenanceService.replaceMetadataSingle(context, dso, metadataField, oldMtdVal); } else { throw new UnprocessableEntityException("There is no metadata of this type at that index"); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/ItemDiscoverableReplaceOperation.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/ItemDiscoverableReplaceOperation.java index df17d4e92da3..22f136638c32 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/ItemDiscoverableReplaceOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/ItemDiscoverableReplaceOperation.java @@ -12,6 +12,8 @@ import org.dspace.app.rest.model.patch.Operation; import org.dspace.content.Item; import org.dspace.core.Context; +import org.dspace.core.ProvenanceService; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; /** @@ -31,6 +33,9 @@ public class ItemDiscoverableReplaceOperation extends PatchOperation { */ private static final String OPERATION_PATH_DISCOVERABLE = "/discoverable"; + @Autowired + ProvenanceService provenanceService; + @Override public R perform(Context context, R object, Operation operation) { checkOperationValue(operation.getValue()); @@ -41,6 +46,7 @@ public R perform(Context context, R object, Operation operation) { throw new UnprocessableEntityException("A template item cannot be discoverable."); } item.setDiscoverable(discoverable); + provenanceService.makeDiscoverable(context, item, discoverable); return object; } else { throw new DSpaceBadRequestException("ItemDiscoverableReplaceOperation does not support this operation"); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index 8f139a03f5d2..4e6337d33fef 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -2439,7 +2439,8 @@ private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Ex context.restoreAuthSystemState(); String token = 
getAuthToken(asUser.getEmail(), password); - new MetadataPatchSuite().runWith(getClient(token), "/api/core/items/" + item.getID(), expectedStatus); + new MetadataPatchSuite("item-metadata-patch-suite.json").runWith(getClient(token), + "/api/core/items/" + item.getID(), expectedStatus); } /** diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProvenanceExpectedMessages.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProvenanceExpectedMessages.java new file mode 100644 index 000000000000..d95412a20bfd --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProvenanceExpectedMessages.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +/** + * The ProvenanceExpectedMessages enum provides message templates for provenance messages. + * + * @author Michaela Paurikova (dspace at dataquest.sk) + */ +public enum ProvenanceExpectedMessages { + DISCOVERABLE("Item was made discoverable by first (admin) last (admin) (admin@email.com) on \nNo. " + + "of bitstreams: 0\nItem was in collections:\n"), + NON_DISCOVERABLE("Item was made non-discoverable by first (admin) last (admin) (admin@email.com) on " + + "\nNo. of bitstreams: 0\nItem was in collections:\n"), + MAPPED_COL("was mapped to collection"), + ADD_ITEM_MTD("Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on"), + REPLACE_ITEM_MTD("Item metadata (dc.title: Public item 1) was updated by first (admin) last (admin) " + + "(admin@email.com) on \nNo. of bitstreams: 0"), + REMOVE_ITEM_MTD("Item metadata (dc.title: Public item 1) was deleted by first (admin) last (admin) " + + "(admin@email.com) on \nNo. 
of bitstreams: 0"), + REMOVE_BITSTREAM_MTD("Item metadata (dc.description) was added by bitstream"), + REPLACE_BITSTREAM_MTD("metadata (dc.title: test) was updated by first (admin) last (admin) " + + "(admin@email.com) on \nNo. of bitstreams: 1\n"), + REMOVE_BITSTREAM("was deleted bitstream"), + ADD_BITSTREAM("Item was added bitstream to bundle"), + UPDATE_LICENSE("License (Test 1) was updated by first (admin) last (admin) (admin@email.com) " + + "on \nNo. of bitstreams: 1\n"), + ADD_LICENSE("License (empty) was added by first (admin) last (admin) (admin@email.com) on \nNo." + + " of bitstreams: 0"), + REMOVE_LICENSE("License (Test) was removed by first (admin) last (admin) (admin@email.com) on " + + "\nNo. of bitstreams: 1\n"), + MOVED_ITEM_COL("Item was moved from collection "); + + private final String template; + + // Constructor to initialize enum with the template string + ProvenanceExpectedMessages(String template) { + this.template = template; + } + + // Method to retrieve the template string + public String getTemplate() { + return template; + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProvenanceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProvenanceServiceIT.java new file mode 100644 index 000000000000..d9aa3768e250 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProvenanceServiceIT.java @@ -0,0 +1,499 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static java.nio.charset.Charset.defaultCharset; +import static org.apache.commons.io.IOUtils.toInputStream; +import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST_VALUE; +import static org.springframework.http.MediaType.parseMediaType; +import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.ws.rs.core.MediaType; + +import org.dspace.app.rest.model.patch.AddOperation; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.RemoveOperation; +import org.dspace.app.rest.model.patch.ReplaceOperation; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.BundleBuilder; +import org.dspace.builder.ClarinLicenseBuilder; +import org.dspace.builder.ClarinLicenseLabelBuilder; +import org.dspace.builder.ClarinLicenseResourceMappingBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.clarin.ClarinLicense; +import org.dspace.content.clarin.ClarinLicenseLabel; +import org.dspace.content.clarin.ClarinLicenseResourceMapping; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.clarin.ClarinLicenseLabelService; +import 
org.dspace.content.service.clarin.ClarinLicenseService; +import org.dspace.core.Constants; +import org.dspace.discovery.SearchServiceException; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.mock.web.MockMultipartFile; +import org.springframework.test.web.servlet.request.MockMvcRequestBuilders; + +public class ProvenanceServiceIT extends AbstractControllerIntegrationTest { + @Autowired + private ItemService itemService; + @Autowired + private ClarinLicenseLabelService clarinLicenseLabelService; + @Autowired + private ClarinLicenseService clarinLicenseService; + + private Collection collection; + private Item item; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity).build(); + item = ItemBuilder.createItem(context, collection) + .withTitle("Public item 1") + .build(); + context.restoreAuthSystemState(); + } + + @After + @Override + public void destroy() throws Exception { + context.turnOffAuthorisationSystem(); + // Delete community created in init() + try { + ItemBuilder.deleteItem(item.getID()); + CollectionBuilder.deleteCollection(collection.getID()); + CommunityBuilder.deleteCommunity(parentCommunity.getID()); + } catch (Exception e) { + // ignore + } + context.restoreAuthSystemState(); + + item = null; + collection = null; + parentCommunity = null; + super.destroy(); + } + + @Test + public void updateLicenseTest() throws Exception { + Bitstream bitstream = createBitstream(item, Constants.LICENSE_BUNDLE_NAME); + ClarinLicense clarinLicense1 = createClarinLicense("Test 1", "Test Def"); + ClarinLicenseResourceMapping mapping = 
createResourceMapping(clarinLicense1, bitstream); + ClarinLicense clarinLicense2 = createClarinLicense("Test 2", "Test Def"); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put("/api/core/items/" + item.getID() + "/bundles") + .param("licenseID", clarinLicense2.getID().toString())) + .andExpect(status().isOk()); + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.UPDATE_LICENSE.getTemplate()); + + deleteBitstream(bitstream); + deleteClarinLicense(clarinLicense1); + deleteClarinLicense(clarinLicense2); + deleteResourceMapping(mapping.getID()); + } + + @Test + public void addLicenseTest() throws Exception { + ClarinLicense clarinLicense = createClarinLicense("Test", "Test Def"); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put("/api/core/items/" + item.getID() + "/bundles") + .param("licenseID", clarinLicense.getID().toString())) + .andExpect(status().isOk()); + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.ADD_LICENSE.getTemplate()); + + deleteClarinLicense(clarinLicense); + } + + @Test + public void removeLicenseTest() throws Exception { + Bitstream bitstream = createBitstream(item, Constants.LICENSE_BUNDLE_NAME); + ClarinLicense clarinLicense = createClarinLicense("Test", "Test Def"); + ClarinLicenseResourceMapping mapping = createResourceMapping(clarinLicense, bitstream); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(put("/api/core/items/" + item.getID() + "/bundles") + .param("licenseID", "-1")) + .andExpect(status().isOk()); + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.REMOVE_LICENSE.getTemplate()); + + deleteBitstream(bitstream); + deleteClarinLicense(clarinLicense); + deleteResourceMapping(mapping.getID()); + } + + @Test + public void makeDiscoverableTest() throws Exception { + item.setDiscoverable(false); + String token = 
getAuthToken(admin.getEmail(), password); + List ops = new ArrayList<>(); + ReplaceOperation replaceOperation = new ReplaceOperation("/discoverable", true); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + getClient(token).perform(patch("/api/core/items/" + item.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.uuid", Matchers.is(item.getID().toString()))) + .andExpect(jsonPath("$.discoverable", Matchers.is(true))); + + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.DISCOVERABLE.getTemplate()); + } + + @Test + public void makeNonDiscoverableTest() throws Exception { + item.setDiscoverable(true); + String token = getAuthToken(admin.getEmail(), password); + List ops = new ArrayList<>(); + ReplaceOperation replaceOperation = new ReplaceOperation("/discoverable", false); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + getClient(token).perform(patch("/api/core/items/" + item.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.NON_DISCOVERABLE.getTemplate()); + } + + @Test + public void addedToMappedCollTest() throws Exception { + Collection coll = createCollection(); + + String adminToken = getAuthToken(admin.getEmail(), password); + getClient(adminToken).perform( + post("/api/core/items/" + item.getID() + "/mappedCollections/") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content( + "https://localhost:8080/spring-rest/api/core/collections/" + coll.getID() + "\n" + ) + ); + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.MAPPED_COL.getTemplate()); + + deleteCollection(coll.getID()); + } + + @Test + public void addItemMetadataTest() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), 
password); + List ops = new ArrayList<>(); + AddOperation addOperation = new AddOperation("/metadata/dc.title", "Test"); + ops.add(addOperation); + String patchBody = getPatchContent(ops); + getClient(adminToken).perform(patch("/api/core/items/" + item.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.ADD_ITEM_MTD.getTemplate()); + } + + @Test + public void replaceItemMetadataTest() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + int index = 0; + List ops = new ArrayList<>(); + ReplaceOperation replaceOperation = new ReplaceOperation("/metadata/dc.title/" + index, "Test"); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + getClient(adminToken).perform(patch("/api/core/items/" + item.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.REPLACE_ITEM_MTD.getTemplate()); + } + + @Test + public void removeItemMetadataTest() throws Exception { + int index = 0; + String adminToken = getAuthToken(admin.getEmail(), password); + List ops = new ArrayList<>(); + RemoveOperation removeOperation = new RemoveOperation("/metadata/dc.title/" + index); + ops.add(removeOperation); + String patchBody = getPatchContent(ops); + getClient(adminToken).perform(patch("/api/core/items/" + item.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.REMOVE_ITEM_MTD.getTemplate()); + } + + @Test + public void removeBitstreamMetadataTest() throws Exception { + Bitstream bitstream = createBitstream(item, "test"); + + String adminToken = getAuthToken(admin.getEmail(), password); + List ops = new 
ArrayList<>(); + AddOperation addOperation = new AddOperation("/metadata/dc.description", "test"); + ops.add(addOperation); + String patchBody = getPatchContent(ops); + getClient(adminToken).perform(patch("/api/core/bitstreams/" + bitstream.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + objectCheck(itemService.find(context, item.getID()), + ProvenanceExpectedMessages.REMOVE_BITSTREAM_MTD.getTemplate()); + + deleteBitstream(bitstream); + } + + @Test + public void addBitstreamMetadataTest() throws Exception { + Bitstream bitstream = createBitstream(item, "test"); + + String adminToken = getAuthToken(admin.getEmail(), password); + List ops = new ArrayList<>(); + AddOperation addOperation = new AddOperation("/metadata/dc.description", "test"); + ops.add(addOperation); + String patchBody = getPatchContent(ops); + getClient(adminToken).perform(patch("/api/core/bitstreams/" + bitstream.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + objectCheck(itemService.find(context, item.getID()), + ProvenanceExpectedMessages.REMOVE_BITSTREAM_MTD.getTemplate()); + } + + @Test + public void updateMetadataBitstreamTest() throws Exception { + Bitstream bitstream = createBitstream(item, "test"); + bitstream.setName(context, "test"); + + String adminToken = getAuthToken(admin.getEmail(), password); + int index = 0; + List ops = new ArrayList<>(); + ReplaceOperation replaceOperation = new ReplaceOperation("/metadata/dc.title/" + index + "/value", "test 1"); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + getClient(adminToken).perform(patch("/api/core/bitstreams/" + bitstream.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + objectCheck(itemService.find(context, item.getID()), + ProvenanceExpectedMessages.REPLACE_BITSTREAM_MTD.getTemplate()); + + 
deleteBitstream(bitstream); + } + + @Test + public void removeBitstreamFromItemTest() throws Exception { + Bitstream bitstream = createBitstream(item, "test"); + + String adminToken = getAuthToken(admin.getEmail(), password); + List ops = new ArrayList<>(); + RemoveOperation removeOperation = new RemoveOperation("/bitstreams/" + bitstream.getID()); + ops.add(removeOperation); + String patchBody = getPatchContent(ops); + getClient(adminToken).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)); + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.REMOVE_BITSTREAM.getTemplate()); + + deleteBitstream(bitstream); + } + + @Test + public void addBitstreamToItemTest() throws Exception { + Bundle bundle = createBundle(item, "test"); + + String token = getAuthToken(admin.getEmail(), password); + String input = "Hello, World!"; + context.turnOffAuthorisationSystem(); + MockMultipartFile file = new MockMultipartFile("file", "hello.txt", + org.springframework.http.MediaType.TEXT_PLAIN_VALUE, + input.getBytes()); + context.restoreAuthSystemState(); + getClient(token) + .perform(MockMvcRequestBuilders.multipart("/api/core/bundles/" + bundle.getID() + "/bitstreams") + .file(file)) + .andExpect(status().isCreated()); + objectCheck(itemService.find(context, item.getID()), ProvenanceExpectedMessages.ADD_BITSTREAM.getTemplate()); + + deleteBundle(bundle.getID()); + } + + @Test + public void moveItemColTest() throws Exception { + Collection col = createCollection(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token) + .perform(put("/api/core/items/" + item.getID() + "/owningCollection/") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content( + "https://localhost:8080/spring-rest/api/core/collections/" + col.getID() + )) + .andExpect(status().isOk()); + objectCheck(itemService.find(context, item.getID()), 
ProvenanceExpectedMessages.MOVED_ITEM_COL.getTemplate()); + + deleteCollection(col.getID()); + } + + + private String provenanceMetadataModified(String metadata) { + // Regex to match the date pattern + String datePattern = "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}Z"; + Pattern pattern = Pattern.compile(datePattern); + Matcher matcher = pattern.matcher(metadata); + String rspModifiedProvenance = metadata; + while (matcher.find()) { + String dateString = matcher.group(0); + rspModifiedProvenance = rspModifiedProvenance.replaceAll(dateString, ""); + } + return rspModifiedProvenance; + } + + private void objectCheck(DSpaceObject obj, String expectedMessage) throws Exception { + List metadata = obj.getMetadata(); + boolean contain = false; + for (MetadataValue value : metadata) { + if (!Objects.equals(value.getMetadataField().toString(), "dc_description_provenance")) { + continue; + } + if (provenanceMetadataModified(value.getValue()).contains(expectedMessage)) { + contain = true; + break; + } + } + if (!contain) { + Assert.fail("Metadata provenance do not contain expected data: " + expectedMessage); + } + } + + private Bundle createBundle(Item item, String bundleName) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + Bundle bundle = BundleBuilder.createBundle(context, item).withName(bundleName).build(); + context.restoreAuthSystemState(); + return bundle; + } + + private Bitstream createBitstream(Item item, String bundleName) + throws SQLException, AuthorizeException, IOException { + context.turnOffAuthorisationSystem(); + Bundle bundle = createBundle(item, Objects.isNull(bundleName) ? 
"test" : bundleName); + Bitstream bitstream = BitstreamBuilder.createBitstream(context, bundle, + toInputStream("Test Content", defaultCharset())).build(); + context.restoreAuthSystemState(); + return bitstream; + } + + private void deleteBitstream(Bitstream bitstream) throws SQLException, IOException { + int size = bitstream.getBundles().size(); + for (int i = 0; i < size; i++) { + deleteBundle(bitstream.getBundles().get(i).getID()); + } + BitstreamBuilder.deleteBitstream(bitstream.getID()); + } + + + private void deleteBundle(UUID uuid) throws SQLException, IOException { + BundleBuilder.deleteBundle(uuid); + } + + private ClarinLicenseLabel createClarinLicenseLabel(String label, boolean extended, String title) + throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + ClarinLicenseLabel clarinLicenseLabel = ClarinLicenseLabelBuilder.createClarinLicenseLabel(context).build(); + clarinLicenseLabel.setLabel(label); + clarinLicenseLabel.setExtended(extended); + clarinLicenseLabel.setTitle(title); + clarinLicenseLabelService.update(context, clarinLicenseLabel); + context.restoreAuthSystemState(); + return clarinLicenseLabel; + } + + private ClarinLicense createClarinLicense(String name, String definition) + throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + ClarinLicense clarinLicense = ClarinLicenseBuilder.createClarinLicense(context).build(); + clarinLicense.setDefinition(definition); + clarinLicense.setName(name); + HashSet clarinLicenseLabels = new HashSet<>(); + ClarinLicenseLabel clarinLicenseLabel = createClarinLicenseLabel("lbl", false, "Test Title"); + clarinLicenseLabels.add(clarinLicenseLabel); + clarinLicense.setLicenseLabels(clarinLicenseLabels); + clarinLicenseService.update(context, clarinLicense); + context.restoreAuthSystemState(); + return clarinLicense; + } + + private void deleteClarinLicenseLable(Integer id) throws Exception { + ClarinLicenseLabelBuilder.deleteClarinLicenseLabel(id); + } 
+ + private void deleteClarinLicense(ClarinLicense license) throws Exception { + int size = license.getLicenseLabels().size(); + for (int i = 0; i < size; i++) { + deleteClarinLicenseLable(license.getLicenseLabels().get(i).getID()); + } + ClarinLicenseBuilder.deleteClarinLicense(license.getID()); + } + + private Collection createCollection() { + context.turnOffAuthorisationSystem(); + Collection col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + context.restoreAuthSystemState(); + return col; + } + + private void deleteCollection(UUID uuid) throws SearchServiceException, SQLException, IOException { + CollectionBuilder.deleteCollection(uuid); + } + + private ClarinLicenseResourceMapping createResourceMapping(ClarinLicense license, Bitstream bitstream) + throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + ClarinLicenseResourceMapping resourceMapping = + ClarinLicenseResourceMappingBuilder.createClarinLicenseResourceMapping(context).build(); + context.restoreAuthSystemState(); + resourceMapping.setLicense(license); + resourceMapping.setBitstream(bitstream); + return resourceMapping; + } + + private void deleteResourceMapping(Integer id) throws Exception { + ClarinLicenseResourceMappingBuilder.delete(id); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/MetadataPatchSuite.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/MetadataPatchSuite.java index 423a4cbe3513..97e1491d03c9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/MetadataPatchSuite.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/test/MetadataPatchSuite.java @@ -11,10 +11,13 @@ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import 
javax.ws.rs.core.MediaType; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; import org.junit.Assert; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.ResultActions; @@ -24,6 +27,7 @@ * Utility class for performing metadata patch tests sourced from a common json file (see constructor). */ public class MetadataPatchSuite { + static String PROVENANCE = "dc.description.provenance"; private final ObjectMapper objectMapper = new ObjectMapper(); private final JsonNode suite; @@ -36,6 +40,16 @@ public MetadataPatchSuite() throws Exception { suite = objectMapper.readTree(getClass().getResourceAsStream("metadata-patch-suite.json")); } + /** + * Initializes the suite by parsing the json file of tests. + * + * @param name name of resource + * @throws Exception if there is an error reading the file. + */ + public MetadataPatchSuite(String name) throws Exception { + suite = objectMapper.readTree(getClass().getResourceAsStream(name)); + } + /** * Runs all tests in the file using the given client and url, expecting the given status. 
* @@ -78,13 +92,32 @@ private void checkResponse(String verb, MockMvc client, MockHttpServletRequestBu .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) .andExpect(status().is(expectedStatus)); if (expectedStatus >= 200 && expectedStatus < 300) { - String responseBody = resultActions.andReturn().getResponse().getContentAsString(); - JsonNode responseJson = objectMapper.readTree(responseBody); - String responseMetadata = responseJson.get("metadata").toString(); - if (!responseMetadata.equals(expectedMetadata)) { - Assert.fail("Expected metadata in " + verb + " response: " + expectedMetadata - + "\nGot metadata in " + verb + " response: " + responseMetadata); - } + String responseBody = resultActions.andReturn().getResponse().getContentAsString(); + JsonNode responseJson = objectMapper.readTree(responseBody); + JsonNode responseMetadataJson = responseJson.get("metadata"); + if (responseMetadataJson.get(PROVENANCE) != null) { + // In the provenance metadata, there is a timestamp indicating when they were added. + // To ensure accurate comparison, remove that date. 
+ String rspProvenance = responseMetadataJson.get(PROVENANCE).toString(); + // Regex to match the date pattern + String datePattern = "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}Z"; + Pattern pattern = Pattern.compile(datePattern); + Matcher matcher = pattern.matcher(rspProvenance); + String rspModifiedProvenance = rspProvenance; + while (matcher.find()) { + String dateString = matcher.group(0); + rspModifiedProvenance = rspModifiedProvenance.replaceAll(dateString, ""); + } + ObjectMapper objectMapper = new ObjectMapper(); + JsonNode jsonNodePrv = objectMapper.readTree(rspModifiedProvenance); + // Replace the origin metadata with a value with the timestamp removed + ((ObjectNode) responseJson.get("metadata")).put(PROVENANCE, jsonNodePrv); + } + String responseMetadata = responseJson.get("metadata").toString(); + if (!responseMetadata.equals(expectedMetadata)) { + Assert.fail("Expected metadata in " + verb + " response: " + expectedMetadata + + "\nGot metadata in " + verb + " response: " + responseMetadata); + } } } } diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/test/item-metadata-patch-suite.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/test/item-metadata-patch-suite.json new file mode 100644 index 000000000000..fdbe61278546 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/test/item-metadata-patch-suite.json @@ -0,0 +1,219 @@ +{ + "tests": [ + { + "name": "clear metadata", + "patch": [ + { "op": "replace", + "path": "/metadata", + "value": {} + } + ], + "expect": {} + }, + { + "name": "add first title", + "patch": [ + { + "op": "add", + "path": "/metadata/dc.title", + "value": [ + { "value": "title 1" } + ] + } + ], + "expect": { + "dc.description.provenance" : [ + { "value" : "Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", "language" : "en", "authority" : null, "confidence" : -1, "place" : 0} + ], + "dc.title": [ + { "value": "title 1", 
"language": null, "authority": null, "confidence": -1, "place": 0} + ] + } + }, + { + "name": "add second title", + "patch": [ + { + "op": "add", + "path": "/metadata/dc.title/-", + "value": { "value": "最後のタイトル", "language": "ja_JP" } + } + ], + "expect": { + "dc.description.provenance":[ + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":0}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":1} + ], + "dc.title": [ + { "value": "title 1", "language": null, "authority": null, "confidence": -1,"place": 0 }, + { "value": "最後のタイトル", "language": "ja_JP", "authority": null, "confidence": -1 ,"place": 1} + ] + } + }, + { + "name": "insert zeroth title", + "patch": [ + { + "op": "add", + "path": "/metadata/dc.title/0", + "value": { + "value": "title 0" + } + } + ], + "expect": { + "dc.description.provenance":[ + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":0}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":1}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":2} + ], + "dc.title": [ + { "value": "title 0", "language": null, "authority": null, "confidence": -1 ,"place": 0 }, + { "value": "title 1", "language": null, "authority": null, "confidence": -1 ,"place": 1 }, + { "value": "最後のタイトル", "language": "ja_JP", "authority": null, "confidence": -1 ,"place": 2 } + ] + } + }, + { + "name": "move last title up one", + "patch": [ + { + "op": "move", + "from": "/metadata/dc.title/2", + "path": "/metadata/dc.title/1" + } + ], + "expect": 
{ + "dc.description.provenance":[ + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":0}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":1}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":2} + ], + "dc.title": [ + { "value": "title 0", "language": null, "authority": null, "confidence": -1 ,"place": 0 }, + { "value": "最後のタイトル", "language": "ja_JP", "authority": null, "confidence": -1 ,"place": 1 }, + { "value": "title 1", "language": null, "authority": null, "confidence": -1 ,"place": 2 } + ] + } + }, + { + "name": "replace title 2 value and language in two operations", + "patch": [ + { + "op": "replace", + "path": "/metadata/dc.title/1/value", + "value": "title A" + }, + { + "op": "replace", + "path": "/metadata/dc.title/1/language", + "value": "en_US" + } + ], + "expect": { + "dc.description.provenance":[ + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":0}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":1}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":2} + ], + "dc.title": [ + { "value": "title 0", "language": null, "authority": null, "confidence": -1 ,"place": 0 }, + { "value": "title A", "language": "en_US", "authority": null, "confidence": -1 ,"place": 1 }, + { "value": "title 1", "language": null, "authority": null, "confidence": -1 ,"place": 2 } + ] + } + }, + { + "name": "copy title A to end of 
list", + "patch": [ + { + "op": "copy", + "from": "/metadata/dc.title/1", + "path": "/metadata/dc.title/-" + } + ], + "expect": { + "dc.description.provenance":[ + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":0}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":1}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":2} + ], + "dc.title": [ + { "value": "title 0", "language": null, "authority": null, "confidence": -1 ,"place": 0 }, + { "value": "title A", "language": "en_US", "authority": null, "confidence": -1 ,"place": 1 }, + { "value": "title 1", "language": null, "authority": null, "confidence": -1 ,"place": 2 }, + { "value": "title A", "language": "en_US", "authority": null, "confidence": -1 ,"place": 3 } + ] + } + }, + { + "name": "remove both title A copies", + "patch": [ + { + "op": "remove", + "path": "/metadata/dc.title/1" + }, + { + "op": "remove", + "path": "/metadata/dc.title/2" + } + ], + "expect": { + "dc.description.provenance":[ + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":0}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":1}, + {"value":"Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language":"en","authority":null,"confidence":-1,"place":2}, + {"value":"Item metadata (dc.title: title A) was deleted by first (admin) last (admin) (admin@email.com) on \nNo. 
of bitstreams: 0", + "language":"en","authority":null,"confidence":-1,"place":3}, + {"value":"Item metadata (dc.title: title A) was deleted by first (admin) last (admin) (admin@email.com) on \nNo. of bitstreams: 0", + "language":"en","authority":null,"confidence":-1,"place":4} + ], + "dc.title": [ + { "value": "title 0", "language": null, "authority": null, "confidence": -1 ,"place": 0 }, + { "value": "title 1", "language": null, "authority": null, "confidence": -1 ,"place": 1 } + ] + } + }, + { + "name": "remove all titles", + "patch": [ + { + "op": "remove", + "path": "/metadata/dc.title" + } + ], + "expect": { + "dc.description.provenance": [ + { + "value": "Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language": "en", "authority": null, "confidence": -1, "place": 0}, + {"value": "Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language": "en", "authority": null, "confidence": -1, "place": 1}, + {"value": "Item metadata (dc.title) was added by first (admin) last (admin) (admin@email.com) on ", + "language": "en", "authority": null, "confidence": -1, "place": 2}, + {"value": "Item metadata (dc.title: title A) was deleted by first (admin) last (admin) (admin@email.com) on \nNo. of bitstreams: 0", + "language": "en", "authority": null, "confidence": -1, "place": 3}, + {"value": "Item metadata (dc.title: title A) was deleted by first (admin) last (admin) (admin@email.com) on \nNo. 
of bitstreams: 0", + "language": "en", "authority": null, "confidence": -1, "place": 4} + ] + } + } + ] +} diff --git a/dspace/config/spring/api/core-services.xml b/dspace/config/spring/api/core-services.xml index 305d41f64ee5..f6ad5b1e5938 100644 --- a/dspace/config/spring/api/core-services.xml +++ b/dspace/config/spring/api/core-services.xml @@ -179,5 +179,7 @@ + + From ca40fa920dfb3669e824bf8342c0aef90ec409f6 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Fri, 24 Jan 2025 12:05:13 +0100 Subject: [PATCH 35/45] Facet result for suggestion (#854) * Retrieve the suggestions from the facet results instead of a specific bunch of results * Removed not required log. --- .../MetadataValueRestRepository.java | 83 +++++++++++-------- .../repository/SuggestionRestController.java | 44 ++++------ .../rest/MetadataValueRestRepositoryIT.java | 21 +++-- 3 files changed, 80 insertions(+), 68 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataValueRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataValueRestRepository.java index d192abb7b6c7..f32049088b13 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataValueRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataValueRestRepository.java @@ -112,9 +112,52 @@ public Page findByValue(@Parameter(value = "schema", r searchValue = searchValue.replace(":", ""); } + List metadataValueWrappers = new ArrayList<>(); + // Perform a search, but only retrieve the total count of results, not the actual objects + DiscoverResult searchResult = createAndRunDiscoverResult(context, metadataField, searchValue, 0); + long totalResultsLong = searchResult.getTotalSearchResults(); + // Safe conversion from long to int + int totalResults = (totalResultsLong > Integer.MAX_VALUE) ? 
+ Integer.MAX_VALUE : (int) totalResultsLong; + // Perform the search again, this time retrieving the actual results based on the total count + searchResult = createAndRunDiscoverResult(context, metadataField, searchValue, totalResults); + for (IndexableObject object : searchResult.getIndexableObjects()) { + if (object instanceof IndexableItem) { + // Get the item which has the metadata with the search value + List metadataValues = itemService.getMetadataByMetadataString( + ((IndexableItem) object).getIndexedObject(), metadataField); + + // The Item could have more metadata than the metadata with searching value, filter that metadata + String finalSearchValue = searchValue; + List filteredMetadataValues = metadataValues.stream() + .filter(metadataValue -> metadataValue.getValue().contains(finalSearchValue)) + .collect(Collectors.toList()); + + // convert metadata values to the wrapper + List metadataValueWrapperList = + this.convertMetadataValuesToWrappers(filteredMetadataValues); + metadataValueWrappers.addAll(metadataValueWrapperList); + } + } + + // filter eu sponsor -> do not return eu sponsor suggestions for items where eu sponsor is used. + // openAIRE API + if (StringUtils.equals(schemaName, "local") && StringUtils.equals(elementName, "sponsor")) { + metadataValueWrappers = filterEUSponsors(metadataValueWrappers); + } + metadataValueWrappers = distinctMetadataValues(metadataValueWrappers); + + return converter.toRestPage(metadataValueWrappers, pageable, utils.obtainProjection()); + } + + /** + * Create a discover query and retrieve the results from the Solr Search core. 
+ */ + private DiscoverResult createAndRunDiscoverResult(Context context, String metadataField, + String searchValue, int maxResults) { // Find matches in Solr Search core DiscoverQuery discoverQuery = - this.createDiscoverQuery(metadataField, searchValue); + this.createDiscoverQuery(metadataField, searchValue, maxResults); if (ObjectUtils.isEmpty(discoverQuery)) { throw new IllegalArgumentException("Cannot create a DiscoverQuery from the arguments."); @@ -124,41 +167,12 @@ public Page findByValue(@Parameter(value = "schema", r if (StringUtils.isNotBlank(normalizedQuery)) { discoverQuery.setQuery(normalizedQuery); } - - List metadataValueWrappers = new ArrayList<>(); try { - DiscoverResult searchResult = searchService.search(context, discoverQuery); - for (IndexableObject object : searchResult.getIndexableObjects()) { - if (object instanceof IndexableItem) { - // Get the item which has the metadata with the search value - List metadataValues = itemService.getMetadataByMetadataString( - ((IndexableItem) object).getIndexedObject(), metadataField); - - // The Item could have more metadata than the metadata with searching value, filter that metadata - String finalSearchValue = searchValue; - List filteredMetadataValues = metadataValues.stream() - .filter(metadataValue -> metadataValue.getValue().contains(finalSearchValue)) - .collect(Collectors.toList()); - - // convert metadata values to the wrapper - List metadataValueWrapperList = - this.convertMetadataValuesToWrappers(filteredMetadataValues); - metadataValueWrappers.addAll(metadataValueWrapperList); - } - } + return searchService.search(context, discoverQuery); } catch (SearchServiceException e) { log.error("Error while searching with Discovery", e); throw new IllegalArgumentException("Error while searching with Discovery: " + e.getMessage()); } - - // filter eu sponsor -> do not return eu sponsor suggestions for items where eu sponsor is used. 
- // openAIRE API - if (StringUtils.equals(schemaName, "local") && StringUtils.equals(elementName, "sponsor")) { - metadataValueWrappers = filterEUSponsors(metadataValueWrappers); - } - metadataValueWrappers = distinctMetadataValues(metadataValueWrappers); - - return converter.toRestPage(metadataValueWrappers, pageable, utils.obtainProjection()); } public List filterEUSponsors(List metadataWrappers) { @@ -172,11 +186,10 @@ public List distinctMetadataValues(List convertMetadataValuesToWrappers(List loadSuggestionsFromSolr(String autocompleteCus DiscoverQuery discoverQuery = new DiscoverQuery(); // Process the custom query if it contains the specific query parameter `?query=` autocompleteCustom = updateAutocompleteAndQuery(autocompleteCustom, discoverQuery); - // TODO - search facets and process facet results instead of indexable objects - discoverQuery.setMaxResults(500); + DiscoverFacetField facetField = new DiscoverFacetField(autocompleteCustom, + DiscoveryConfigurationParameters.TYPE_STANDARD, + -1, // no limit (get all facet values) + DiscoveryConfigurationParameters.SORT.VALUE // sorting order + ); + discoverQuery.addFacetField(facetField); // return only metadata field values discoverQuery.addSearchField(autocompleteCustom); @@ -255,30 +260,17 @@ private List loadSuggestionsFromSolr(String autocompleteCus */ private void processSolrSearchResults(DiscoverResult searchResult, String autocompleteCustom, String searchValue, List results) { - searchResult.getIndexableObjects().forEach(object -> { - if (!(object instanceof IndexableItem)) { - return; - } - IndexableItem item = (IndexableItem) object; - // Get all search documents for the item. 
- searchResult.getSearchDocument(item).forEach((searchDocument) -> { + searchResult.getFacetResult(autocompleteCustom).forEach(facetResult -> { + String displayedValue = facetResult.getDisplayedValue(); + if (displayedValue.contains(searchValue)) { + // Create a new VocabularyEntryRest object VocabularyEntryRest vocabularyEntryRest = new VocabularyEntryRest(); - // All values from Item's specific index - it could contain values we are not looking for. - // The must be filtered out. - List docValues = searchDocument.getSearchFieldValues(autocompleteCustom); - - // Filter values that contain searchValue - List filteredValues = docValues.stream() - .filter(value -> value.contains(searchValue)) - .collect(Collectors.toList()); - - // Add filtered values to the results. It contains only values that contain searchValue. - filteredValues.forEach(value -> { - vocabularyEntryRest.setDisplay(value); - vocabularyEntryRest.setValue(value); - results.add(vocabularyEntryRest); - }); - }); + vocabularyEntryRest.setDisplay(displayedValue); + vocabularyEntryRest.setValue(displayedValue); + + // Add the filtered value to the results + results.add(vocabularyEntryRest); + } }); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataValueRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataValueRestRepositoryIT.java index 41f9112cbc09..2c27e1729854 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataValueRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataValueRestRepositoryIT.java @@ -86,7 +86,8 @@ public void findAll() throws Exception { // Get title metadata from the item MetadataValue titleMetadataValue = this.getTitleMetadataValue(); - getClient().perform(get("/api/core/metadatavalues") + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/core/metadatavalues") .param("size", String.valueOf(100))) 
.andExpect(status().isOk()) .andExpect(content().contentType(contentType)) @@ -105,7 +106,8 @@ public void findOne() throws Exception { // Get title metadata from the item MetadataValue titleMetadataValue = this.getTitleMetadataValue(); - getClient().perform(get("/api/core/metadatavalues/" + titleMetadataValue.getID())) + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/core/metadatavalues/" + titleMetadataValue.getID())) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$", Matchers.is( @@ -160,7 +162,8 @@ public void findByValue_searchValue() throws Exception { String metadataQualifier = titleMetadataValue.getMetadataField().getQualifier(); String searchValue = titleMetadataValue.getValue(); - getClient().perform(get(SEARCH_BYVALUE_ENDPOINT) + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get(SEARCH_BYVALUE_ENDPOINT) .param("schema", metadataSchema) .param("element", metadataElement) .param("qualifier", metadataQualifier) @@ -197,7 +200,8 @@ public void findByValue_searchValueWithStringAndNumber() throws Exception { String metadataQualifier = titleMetadataValue.getMetadataField().getQualifier(); String searchValue = titleMetadataValue.getValue(); - getClient().perform(get(SEARCH_BYVALUE_ENDPOINT) + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get(SEARCH_BYVALUE_ENDPOINT) .param("schema", metadataSchema) .param("element", metadataElement) .param("qualifier", metadataQualifier) @@ -234,7 +238,8 @@ public void findByValue_searchValueIsNumber() throws Exception { String metadataQualifier = titleMetadataValue.getMetadataField().getQualifier(); String searchValue = titleMetadataValue.getValue(); - getClient().perform(get(SEARCH_BYVALUE_ENDPOINT) + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get(SEARCH_BYVALUE_ENDPOINT) .param("schema", metadataSchema) 
.param("element", metadataElement) .param("qualifier", metadataQualifier) @@ -260,7 +265,8 @@ public void shouldReturnDistinctSuggestion() throws Exception { String metadataQualifier = titleMetadataValue.getMetadataField().getQualifier(); String searchValue = titleMetadataValue.getValue(); - getClient().perform(get(SEARCH_BYVALUE_ENDPOINT) + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get(SEARCH_BYVALUE_ENDPOINT) .param("schema", metadataSchema) .param("element",metadataElement) .param("qualifier",metadataQualifier) @@ -312,7 +318,8 @@ public void shouldReturnOneSuggestionWhenInputHasMoreMetadataValues() throws Exc String metadataQualifier = titleMetadataValue.getMetadataField().getQualifier(); String searchValue = titleMetadataValue.getValue(); - getClient().perform(get(SEARCH_BYVALUE_ENDPOINT) + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get(SEARCH_BYVALUE_ENDPOINT) .param("schema", metadataSchema) .param("element",metadataElement) .param("qualifier",metadataQualifier) From b260927f5ea56dd0646daf4a7fe66a4b8f5401a3 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Fri, 24 Jan 2025 12:48:22 +0100 Subject: [PATCH 36/45] Healthcheck process (#853) * Checkouted files from the `healthcheck-process` branch and updated the code, also added integration test. * Changed the name of the test. * Updated the description to be consistent with another descriptions. * Refactored the calling the check.run method, use `check.report` instead. 
--- .../dspace/app/healthreport/HealthReport.java | 219 ++++++++++++++++++ .../HealthReportScriptConfiguration.java | 54 +++++ .../main/java/org/dspace/health/Check.java | 11 + .../config/spring/api/scripts.xml | 5 + .../org/dspace/scripts/HealthReportIT.java | 43 ++++ dspace/config/modules/healthcheck.cfg | 6 +- dspace/config/spring/api/scripts.xml | 5 + dspace/config/spring/rest/scripts.xml | 5 + 8 files changed, 344 insertions(+), 4 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/app/healthreport/HealthReport.java create mode 100644 dspace-api/src/main/java/org/dspace/app/healthreport/HealthReportScriptConfiguration.java create mode 100644 dspace-api/src/test/java/org/dspace/scripts/HealthReportIT.java diff --git a/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReport.java b/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReport.java new file mode 100644 index 000000000000..c390ea2bdb3e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReport.java @@ -0,0 +1,219 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.healthreport; + +import static org.apache.commons.io.IOUtils.toInputStream; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.Locale; +import java.util.Map; +import javax.mail.MessagingException; + +import org.apache.commons.cli.ParseException; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.core.Context; +import org.dspace.core.Email; +import org.dspace.core.I18nUtil; +import org.dspace.eperson.factory.EPersonServiceFactory; +import 
org.dspace.eperson.service.EPersonService; +import org.dspace.health.Check; +import org.dspace.health.Report; +import org.dspace.health.ReportInfo; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; + +/** + * This class is used to generate a health report of the DSpace instance. + * @author Matus Kasak (dspace at dataquest.sk) + * @author Milan Majchrak (dspace at dataquest.sk) + */ +public class HealthReport extends DSpaceRunnable { + ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final Logger log = LogManager.getLogger(HealthReport.class); + private EPersonService ePersonService; + + /** + * Checks to be performed. + */ + private static final LinkedHashMap checks = Report.checks(); + + /** + * `-i`: Info, show help information. + */ + private boolean info = false; + + /** + * `-e`: Email, send report to specified email address. + */ + private String email; + + /** + * `-c`: Check, perform only specific check by index (0-`getNumberOfChecks()`). + */ + private int specificCheck = -1; + + /** + * `-f`: For, specify the last N days to consider. + * Default value is set in dspace.cfg. + */ + private int forLastNDays = configurationService.getIntProperty("healthcheck.last_n_days"); + + /** + * `-o`: Output, specify a file to save the report. + */ + private String fileName; + + @Override + public HealthReportScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("health-report", HealthReportScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + // `-i`: Info, show help information. 
+ if (commandLine.hasOption('i')) { + info = true; + return; + } + + // `-e`: Email, send report to specified email address. + if (commandLine.hasOption('e')) { + email = commandLine.getOptionValue('e'); + handler.logInfo("\nReport sent to this email address: " + email); + } + + // `-c`: Check, perform only specific check by index (0-`getNumberOfChecks()`). + if (commandLine.hasOption('c')) { + String checkOption = commandLine.getOptionValue('c'); + try { + specificCheck = Integer.parseInt(checkOption); + if (specificCheck < 0 || specificCheck >= getNumberOfChecks()) { + specificCheck = -1; + } + } catch (NumberFormatException e) { + log.info("Invalid value for check. It has to be a number from the displayed range."); + return; + } + } + + // `-f`: For, specify the last N days to consider. + if (commandLine.hasOption('f')) { + String daysOption = commandLine.getOptionValue('f'); + try { + forLastNDays = Integer.parseInt(daysOption); + } catch (NumberFormatException e) { + log.info("Invalid value for last N days. Argument f has to be a number."); + return; + } + } + + // `-o`: Output, specify a file to save the report. + if (commandLine.hasOption('o')) { + fileName = commandLine.getOptionValue('o'); + } + } + + @Override + public void internalRun() throws Exception { + if (info) { + printHelp(); + return; + } + + ReportInfo ri = new ReportInfo(this.forLastNDays); + + StringBuilder sbReport = new StringBuilder(); + sbReport.append("\n\nHEALTH REPORT:\n"); + + int position = -1; + for (Map.Entry check_entry : Report.checks().entrySet()) { + ++position; + if (specificCheck != -1 && specificCheck != position) { + continue; + } + + String name = check_entry.getKey(); + Check check = check_entry.getValue(); + + log.info("#{}. 
Processing [{}] at [{}]", position, name, new SimpleDateFormat( + "yyyy-MM-dd HH:mm:ss.SSS").format(new Date())); + + sbReport.append("\n######################\n\n").append(name).append(":\n"); + check.report(ri); + sbReport.append(check.getReport()); + } + + // save output to file + if (fileName != null) { + Context context = new Context(); + context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier())); + + InputStream inputStream = toInputStream(sbReport.toString(), StandardCharsets.UTF_8); + handler.writeFilestream(context, fileName, inputStream, "export"); + + context.restoreAuthSystemState(); + context.complete(); + } + + // send email to email address from argument + if (email != null) { + try { + Email e = Email.getEmail(I18nUtil.getEmailFilename(Locale.getDefault(), "healthcheck")); + e.addRecipient(email); + e.addArgument(sbReport.toString()); + e.send(); + } catch (IOException | MessagingException e) { + log.error("Error sending email:", e); + } + } + + handler.logInfo(sbReport.toString()); + } + + @Override + public void printHelp() { + handler.logInfo("\n\nINFORMATION\nThis process creates a health report of your DSpace.\n" + + "You can choose from these available options:\n" + + " -i, --info Show help information\n" + + " -e, --email Send report to specified email address\n" + + " -c, --check Perform only specific check by index (0-" + (getNumberOfChecks() - 1) + ")\n" + + " -f, --for Specify the last N days to consider\n" + + " -o, --output Specify a file to save the report\n\n" + + "If you want to execute only one check using -c, use check index:\n" + checksNamesToString() + "\n" + ); + } + + /** + * Convert checks names to string. + */ + private String checksNamesToString() { + StringBuilder names = new StringBuilder(); + int pos = 0; + for (String name : checks.keySet()) { + names.append(String.format(" %d. %s\n", pos++, name)); + } + return names.toString(); + } + + /** + * Get the number of checks. 
This is used for the `-c` option. + */ + public static int getNumberOfChecks() { + return checks.size(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReportScriptConfiguration.java new file mode 100644 index 000000000000..771cc70aadb9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReportScriptConfiguration.java @@ -0,0 +1,54 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.healthreport; + +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * This class represents a HealthReport that is used in the CLI. + * @author Matus Kasak (dspace at dataquest.sk) + */ +public class HealthReportScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableclass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableclass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableclass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + options.addOption("i", "info", false, + "Show help information."); + options.addOption("e", "email", true, + "Send report to this email address."); + options.getOption("e").setType(String.class); + options.addOption("c", "check", true, + String.format("Perform only specific check (use index from 0 to %d, " + + "otherwise perform default checks).", HealthReport.getNumberOfChecks() - 1)); + options.getOption("c").setType(String.class); + options.addOption("f", "for", true, + "Report for last N days. 
Used only in general information for now."); + options.getOption("f").setType(String.class); + options.addOption("o", "output", true, + "Save report to the file."); + + super.options = options; + } + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/health/Check.java b/dspace-api/src/main/java/org/dspace/health/Check.java index 40f29c15f73a..89b01ca95685 100644 --- a/dspace-api/src/main/java/org/dspace/health/Check.java +++ b/dspace-api/src/main/java/org/dspace/health/Check.java @@ -50,4 +50,15 @@ protected void error(Throwable e, String msg) { } } + public String getErrors() { + return errors_; + } + + public String getReport() { + return report_; + } + + public long getTook() { + return took_; + } } diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml index 738e11f7b432..ce760106fbf0 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml @@ -96,4 +96,9 @@ + + + + + diff --git a/dspace-api/src/test/java/org/dspace/scripts/HealthReportIT.java b/dspace-api/src/test/java/org/dspace/scripts/HealthReportIT.java new file mode 100644 index 000000000000..c4e732d49990 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/scripts/HealthReportIT.java @@ -0,0 +1,43 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.scripts; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; + +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import 
org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.junit.Test; + +/** + * Integration test for the HealthReport script + * @author Milan Majchrak (milan.majchrak at dataquest.sk) + */ +public class HealthReportIT extends AbstractIntegrationTestWithDatabase { + @Test + public void testDefaultHealthcheckRun() throws Exception { + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "health-report" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(1)); + assertThat(messages, hasItem(containsString("HEALTH REPORT:"))); + } +} diff --git a/dspace/config/modules/healthcheck.cfg b/dspace/config/modules/healthcheck.cfg index e45407abdfb1..69300b2f0029 100644 --- a/dspace/config/modules/healthcheck.cfg +++ b/dspace/config/modules/healthcheck.cfg @@ -5,10 +5,8 @@ # If you use the Pre-DSpace-3.0 embargo feature, you might want to # add 'Embargo items (Pre-3.0),' to the following list. 
healthcheck.checks = General Information,\ - Checksum,\ Item summary,\ - User summary,\ - Log Analyser Check + User summary plugin.named.org.dspace.health.Check = \ org.dspace.health.InfoCheck = General Information,\ @@ -18,5 +16,5 @@ plugin.named.org.dspace.health.Check = \ org.dspace.health.UserCheck = User summary,\ org.dspace.health.LogAnalyserCheck = Log Analyser Check -# report from the last N days (where dates are applicable) +# default value of the report from the last N days (where dates are applicable) healthcheck.last_n_days = 7 diff --git a/dspace/config/spring/api/scripts.xml b/dspace/config/spring/api/scripts.xml index d913a30b668e..ddf2c273232c 100644 --- a/dspace/config/spring/api/scripts.xml +++ b/dspace/config/spring/api/scripts.xml @@ -91,4 +91,9 @@ + + + + + diff --git a/dspace/config/spring/rest/scripts.xml b/dspace/config/spring/rest/scripts.xml index a8ef279383ae..d24612203f7e 100644 --- a/dspace/config/spring/rest/scripts.xml +++ b/dspace/config/spring/rest/scripts.xml @@ -74,4 +74,9 @@ + + + + + From 018af11e90a58e055ab9ccf15caa9b5a177212b2 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Fri, 24 Jan 2025 12:58:39 +0100 Subject: [PATCH 37/45] Changed matomo url to some valid URL (#856) * Changed matomo url to some valid URL * Update the url to some non-existing url. 
--- dspace/config/clarin-dspace.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/config/clarin-dspace.cfg b/dspace/config/clarin-dspace.cfg index f27d6830c213..1b8bc27ad669 100644 --- a/dspace/config/clarin-dspace.cfg +++ b/dspace/config/clarin-dspace.cfg @@ -154,7 +154,7 @@ matomo.auth.token = 26388b4164695d69e6ee6e2dd527b723 matomo.site.id = 1 matomo.tracker.bitstream.site_id = 1 matomo.tracker.oai.site_id = 1 -matomo.tracker.host.url = http://url:port/matomo.php +matomo.tracker.host.url = http://localhost.changeme/matomo.php matomo.custom.dimension.handle.id = 1 statistics.cache-server.uri = http://cache-server.none From eacd6d366e694aee970f529c85b7d06d7e1b8f48 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Fri, 24 Jan 2025 12:59:01 +0100 Subject: [PATCH 38/45] Oai indexing after res policy change (#855) * Fixed nullpointer exception - when the dso object was null and was accessed. * The condition was added to the boolean property. --- .../main/java/org/dspace/xoai/app/XOAI.java | 4 +-- .../ResourcePolicyRestRepository.java | 21 ++++++++++++ .../app/rest/utils/SolrOAIReindexer.java | 32 +++++++++++++------ 3 files changed, 46 insertions(+), 11 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java index 4930dd5956c3..764da35fbf05 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java @@ -431,8 +431,8 @@ private SolrInputDocument index(Item item) if (!discoverable && item.isHidden()) { discoverable = true; } - doc.addField("item.deleted", - (item.isWithdrawn() || (!discoverable) || (isEmbargoed ? 
isPublic : false))); + boolean isDeleted = item.isWithdrawn() || (!discoverable) || (isEmbargoed && isPublic); + doc.addField("item.deleted", isDeleted); /* * An item that is embargoed will potentially not be harvested by incremental diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ResourcePolicyRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ResourcePolicyRestRepository.java index 0b77f96b9b5f..05c00303fcca 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ResourcePolicyRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ResourcePolicyRestRepository.java @@ -11,6 +11,7 @@ import java.sql.SQLException; import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.UUID; import javax.servlet.http.HttpServletRequest; @@ -26,11 +27,13 @@ import org.dspace.app.rest.model.patch.Patch; import org.dspace.app.rest.repository.patch.ResourcePatch; import org.dspace.app.rest.utils.DSpaceObjectUtils; +import org.dspace.app.rest.utils.SolrOAIReindexer; import org.dspace.app.rest.utils.Utils; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.eperson.EPerson; @@ -76,6 +79,9 @@ public class ResourcePolicyRestRepository extends DSpaceRestRepository Date: Fri, 24 Jan 2025 13:08:43 +0100 Subject: [PATCH 39/45] Run build action every 4 hours to be sure tests are still passing. 
--- .github/workflows/build.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c58d6dc73c48..5554d6a37e5d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -5,6 +5,8 @@ name: Build # Run this Build for pushes to our main and all PRs on: + schedule: + - cron: '0 */4 * * *' push: branches: - dtq-dev From 7670b7ba2feae0287164d84926b162845cbcc489 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Thu, 30 Jan 2025 08:51:34 +0100 Subject: [PATCH 40/45] The dc.type should not be repeatable (#861) --- dspace/config/submission-forms.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index 19aa68c31d78..fc96e5743722 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -58,7 +58,7 @@ dc type - true + false dropdown Type of the resource: "Corpus" refers to text, speech and multimodal corpora. 
From aca89cdbe0f43c7ce6cc9f81dff4636b1f688be3 Mon Sep 17 00:00:00 2001 From: Kasinhou <129340513+Kasinhou@users.noreply.github.com> Date: Mon, 3 Feb 2025 16:44:22 +0100 Subject: [PATCH 41/45] UFAL/Health report send to more email adresses Co-authored-by: Matus Kasak --- .../org/dspace/app/healthreport/HealthReport.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReport.java b/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReport.java index c390ea2bdb3e..cd6dc9afc074 100644 --- a/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReport.java +++ b/dspace-api/src/main/java/org/dspace/app/healthreport/HealthReport.java @@ -58,7 +58,7 @@ public class HealthReport extends DSpaceRunnable 0) { try { Email e = Email.getEmail(I18nUtil.getEmailFilename(Locale.getDefault(), "healthcheck")); - e.addRecipient(email); + for (String recipient : emails) { + e.addRecipient(recipient); + } e.addArgument(sbReport.toString()); e.send(); } catch (IOException | MessagingException e) { From 9b012dd27850876125b4e231cca9c8d41b4195f9 Mon Sep 17 00:00:00 2001 From: milanmajchrak <90026355+milanmajchrak@users.noreply.github.com> Date: Mon, 3 Feb 2025 16:45:53 +0100 Subject: [PATCH 42/45] UFAL/Fixed missing `*` character in the required input field --- dspace/config/submission-forms.xml | 7 +++---- dspace/config/submission-forms_cs.xml | 4 ++-- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index fc96e5743722..9e2fd8c6e89c 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -3234,14 +3234,13 @@ - + - - + + \ No newline at end of file diff --git a/dspace/config/submission-forms_cs.xml b/dspace/config/submission-forms_cs.xml index c81cff0133b1..5a366a75aa8d 100644 --- a/dspace/config/submission-forms_cs.xml +++ b/dspace/config/submission-forms_cs.xml @@ 
-3188,12 +3188,12 @@ - + - + From 97fa90511f0c067ad5b51d171f7f5770c34a022f Mon Sep 17 00:00:00 2001 From: Paurikova2 <107862249+Paurikova2@users.noreply.github.com> Date: Tue, 4 Feb 2025 13:50:24 +0100 Subject: [PATCH 43/45] Res policy for submitter (#862) * created res policy if eperson is submitter * allowed submitter edit metadata only in collections from cfg * test for ID collection allowed by cfg * created res policy during item installation, tests * added check if user is submitter * renamed property, default value if is not defined * added comments why the property is not commented --- .../content/InstallItemServiceImpl.java | 80 ++++++ .../test/data/dspaceFolder/config/local.cfg | 5 + .../dspace/app/rest/ItemRestRepositoryIT.java | 263 ++++++++++++++++++ dspace/config/clarin-dspace.cfg | 5 + 4 files changed, 353 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java index f622b98d5ea9..aee5fc74ec0e 100644 --- a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java @@ -9,13 +9,20 @@ import java.io.IOException; import java.sql.SQLException; +import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.UUID; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.logic.Filter; import org.dspace.content.logic.FilterUtils; @@ -26,10 +33,12 @@ import org.dspace.core.Context; import org.dspace.discovery.IsoLangCodes; import 
org.dspace.embargo.service.EmbargoService; +import org.dspace.eperson.EPerson; import org.dspace.event.Event; import org.dspace.identifier.Identifier; import org.dspace.identifier.IdentifierException; import org.dspace.identifier.service.IdentifierService; +import org.dspace.services.ConfigurationService; import org.dspace.supervision.SupervisionOrder; import org.dspace.supervision.service.SupervisionOrderService; import org.springframework.beans.factory.annotation.Autowired; @@ -57,6 +66,9 @@ public class InstallItemServiceImpl implements InstallItemService { @Autowired(required = true) protected SupervisionOrderService supervisionOrderService; @Autowired(required = false) + private ResourcePolicyService resourcePolicyService; + @Autowired(required = true) + protected ConfigurationService configurationService; Logger log = LogManager.getLogger(InstallItemServiceImpl.class); @@ -106,6 +118,12 @@ public Item installItem(Context c, InProgressSubmission is, // the default policies from the collection. itemService.inheritCollectionDefaultPolicies(c, item, collection, false); + //Allow submitter to edit item + if (isCollectionAllowedForSubmitterEditing(item.getOwningCollection()) && + isInSubmitGroup(c.getCurrentUser(), item.getOwningCollection().getID())) { + createResourcePolicy(c, item, Constants.WRITE); + } + return item; } @@ -337,4 +355,66 @@ private void addLanguageNameToMetadata(Context c, Item item) throws SQLException itemService.addMetadata(c, item, "local", "language", "name", null, langName); } } + + /** + * Checks if the provided collection is allowed for submitter metadata editing. + * + * This method retrieves a list of allowed collection names and IDs from the system configuration, + * and checks if the given collection's name or ID matches any of the allowed values. + * + * @param collection The collection to be checked. + * @return True if the collection's name or ID is in the allowed list for submitter editing, false otherwise. 
+ * @throws SQLException If there is an issue retrieving the configuration or querying the database. + */ + private boolean isCollectionAllowedForSubmitterEditing(Collection collection) throws SQLException { + if (Objects.isNull(collection)) { + return false; + } + // Retrieve the allowed collections for submitter edition as an array + String[] editableCollections = configurationService.getArrayProperty("allow.edit.metadata", new String[] {}); + + if (Objects.isNull(editableCollections) || editableCollections.length == 0) { + return false; + } + + Set allowedNamesOrIds = new HashSet<>(Arrays.asList(editableCollections)); + + // Check if the provided collection's name or ID is in the allowed set + return allowedNamesOrIds.contains(collection.getName()) || + allowedNamesOrIds.contains(collection.getID().toString()); + } + + /** + * Checks if the given EPerson is in a submitter of the collection. + * A submit group is identified by the name containing "SUBMIT" and the collection UUID. + * + * @param eperson the EPerson whose is checked + * @param collectionUUID the UUID of the collection to check group names + * @return true if the EPerson is in a submitter, false otherwise + */ + private boolean isInSubmitGroup(EPerson eperson, UUID collectionUUID) { + return eperson.getGroups().stream() + .anyMatch(group -> group.getName().contains("SUBMIT") && + group.getName().contains(collectionUUID.toString())); + } + + /** + * Creates a resource policy for an item, granting the specified action to the current user. + * + * @param context The current DSpace context. + * @param item The item for which the resource policy is being created. + * @param action The action to be assigned to the resource policy (e.g., write, read). + * @throws SQLException If there is an issue interacting with the database. + * @throws AuthorizeException If the current user does not have sufficient authorization + * to create the resource policy. 
+ */ + private void createResourcePolicy(Context context, Item item, int action) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + ResourcePolicy resPol = resourcePolicyService.create(context); + resPol.setAction(action); + resPol.setdSpaceObject(item); + resPol.setEPerson(item.getSubmitter()); + context.restoreAuthSystemState(); + } + } diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 14ff9e3a72a3..0f9daea975e4 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -319,3 +319,8 @@ autocomplete.custom.separator.solr-subject_ac = \\|\\|\\| autocomplete.custom.separator.solr-title_ac = \\|\\|\\| autocomplete.custom.allowed = solr-author_ac,solr-publisher_ac,solr-dataProvider_ac,solr-dctype_ac,solr-subject_ac,solr-handle_title_ac,json_static-iso_langs.json,solr-title_ac +##### METADATA EDIT ##### +#### name || id +### these collections allow submitters to edit metadata of their items +# property is not commented, because of tests +allow.edit.metadata = \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index 4e6337d33fef..8f370af03d3c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -40,8 +40,10 @@ import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.ws.rs.core.MediaType; @@ -63,6 +65,8 @@ import org.dspace.app.rest.repository.ItemRestRepository; import 
org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; @@ -87,6 +91,7 @@ import org.dspace.content.RelationshipType; import org.dspace.content.WorkspaceItem; import org.dspace.content.service.CollectionService; +import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; @@ -121,6 +126,11 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest { @Autowired private ConfigurationService configurationService; + @Autowired + private ResourcePolicyService resourcePolicyService; + @Autowired + private ItemService itemService; + private Item publication1; private Item author1; private Item author2; @@ -2586,6 +2596,259 @@ public void updateTestEPersonWithoutPermissionForbidden() throws Exception { } } + @Test + public void createItemAsSubmitterRestPolicyCorrectCollectionIDTest() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = null; + //disable file upload mandatory + configurationService.setProperty("webui.submit.upload.required", false); + try { + //** GIVEN ** + //1. A community with one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + //2. 
create a normal user to use as submitter + EPerson submitter = EPersonBuilder.createEPerson(context) + .withEmail("submitter@example.com") + .withPassword("dspace") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1") + .withSubmitterGroup(submitter).build(); + //Set property + configurationService.setProperty("allow.edit.metadata", col1.getID().toString()); + context.setCurrentUser(submitter); + + //4. a workspace item + WorkspaceItem wsitem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) + .withTitle("Submission Item") + .withIssueDate("2017-10-17") + .grantLicense() + .build(); + context.restoreAuthSystemState(); + + // get the submitter auth token + String authToken = getAuthToken(submitter.getEmail(), "dspace"); + + // submit the workspaceitem to start the workflow - archived Item + getClient(authToken) + .perform(post(BASE_REST_SERVER_URL + "/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + wsitem.getID()) + .contentType(textUriContentType)); + + // Find created item + Iterator it = itemService.findByCollection(context, col1); + item = it.hasNext() ? 
it.next() : null; + + // Find all resource policy + List list = resourcePolicyService.find(context, item); + boolean found = list.stream() + .anyMatch(resPol -> resPol.getAction() == Constants.WRITE && + resPol.getEPerson() != null && + resPol.getEPerson().getID().equals(submitter.getID())); + // submitter is a member of the submit group, collection ID is in property + // the resource policy was created + assert found; + } finally { + if (Objects.nonNull(item)) { + // remove the item if any + ItemBuilder.deleteItem(item.getID()); + } + } + } + + @Test + public void createItemAsSubmitterCollectionNameNotInConfigTest() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = null; + //disable file upload mandatory + configurationService.setProperty("webui.submit.upload.required", false); + try { + //** GIVEN ** + //1. A community with one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + //2. create a normal user to use as submitter + EPerson submitter = EPersonBuilder.createEPerson(context) + .withEmail("submitter@example.com") + .withPassword("dspace") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1") + .withSubmitterGroup(submitter).build(); + context.setCurrentUser(submitter); + + //4. 
a workspace item + WorkspaceItem wsitem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) + .withTitle("Submission Item") + .withIssueDate("2017-10-17") + .grantLicense() + .build(); + context.restoreAuthSystemState(); + + // get the submitter auth token + String authToken = getAuthToken(submitter.getEmail(), "dspace"); + + // submit the workspaceitem to start the workflow - archived Item + getClient(authToken) + .perform(post(BASE_REST_SERVER_URL + "/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + wsitem.getID()) + .contentType(textUriContentType)); + + // Find created item + Iterator it = itemService.findByCollection(context, col1); + item = it.hasNext() ? it.next() : null; + + // Find all resource policy + List list = resourcePolicyService.find(context, item); + boolean found = list.stream() + .anyMatch(resPol -> resPol.getAction() == Constants.WRITE && + resPol.getEPerson() != null && + resPol.getEPerson().getID().equals(submitter.getID())); + // submission is member of submit group, collection name is not in property + // the resource policy was not created + assert !found; + } finally { + if (Objects.nonNull(item)) { + // remove the item if any + ItemBuilder.deleteItem(item.getID()); + } + } + } + + @Test + public void createItemAsAdminRestPolicyTest() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = null; + //disable file upload mandatory + configurationService.setProperty("webui.submit.upload.required", false); + try { + //** GIVEN ** + //1. A community with one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + //2. 
create a normal user to use as submitter + EPerson user = EPersonBuilder.createEPerson(context) + .withEmail("submitter@example.com") + .withPassword("dspace") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1") + .withAdminGroup(user).build(); + //Set property + configurationService.setProperty("allow.edit.metadata", col1.getID().toString()); + + context.setCurrentUser(user); + + //4. a workspace item + WorkspaceItem wsitem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) + .withTitle("Submission Item") + .withIssueDate("2017-10-17") + .grantLicense() + .build(); + context.restoreAuthSystemState(); + + // get the submitter auth token + String authToken = getAuthToken(user.getEmail(), "dspace"); + + // submit the workspaceitem to start the workflow - archived Item + getClient(authToken) + .perform(post(BASE_REST_SERVER_URL + "/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + wsitem.getID()) + .contentType(textUriContentType)); + + // Find created item + Iterator it = itemService.findByCollection(context, col1); + item = it.hasNext() ? 
it.next() : null; + + // Find all resource policy + List list = resourcePolicyService.find(context, item); + boolean found = list.stream() + .anyMatch(resPol -> resPol.getAction() == Constants.WRITE && + resPol.getEPerson() != null && + resPol.getEPerson().getID().equals(user.getID())); + // submitter is not a member of the submit group + // the resource policy was not created + assert !found; + } finally { + if (Objects.nonNull(item)) { + // remove the item if any + ItemBuilder.deleteItem(item.getID()); + } + } + } + + @Test + public void createItemAsSubmitterRestPolicyCorrectCollectionNameTest() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = null; + //Set property + String colName = "Collection 1"; + configurationService.setProperty("allow.edit.metadata", colName); + //disable file upload mandatory + configurationService.setProperty("webui.submit.upload.required", false); + try { + //** GIVEN ** + //1. A community with one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + //2. create a normal user to use as submitter + EPerson submitter = EPersonBuilder.createEPerson(context) + .withEmail("submitter@example.com") + .withPassword("dspace") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName(colName) + .withSubmitterGroup(submitter).build(); + + context.setCurrentUser(submitter); + + //4. 
a workspace item + WorkspaceItem wsitem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) + .withTitle("Submission Item") + .withIssueDate("2017-10-17") + .grantLicense() + .build(); + context.restoreAuthSystemState(); + + // get the submitter auth token + String authToken = getAuthToken(submitter.getEmail(), "dspace"); + + // submit the workspaceitem to start the workflow - archived Item + getClient(authToken) + .perform(post(BASE_REST_SERVER_URL + "/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + wsitem.getID()) + .contentType(textUriContentType)); + + // Find created item + Iterator it = itemService.findByCollection(context, col1); + item = it.hasNext() ? it.next() : null; + + // Find all resource policy + List list = resourcePolicyService.find(context, item); + boolean found = list.stream() + .anyMatch(resPol -> resPol.getAction() == Constants.WRITE && + resPol.getEPerson() != null && + resPol.getEPerson().getID().equals(submitter.getID())); + // submitter is a member of the submit group, collection name is in property + // the resource policy was created + assert found; + } finally { + if (Objects.nonNull(item)) { + // remove the item if any + ItemBuilder.deleteItem(item.getID()); + } + } + } + @Test public void createItemFromExternalSources() throws Exception { //We turn off the authorization system in order to create the structure as defined below diff --git a/dspace/config/clarin-dspace.cfg b/dspace/config/clarin-dspace.cfg index 1b8bc27ad669..53bb118d3136 100644 --- a/dspace/config/clarin-dspace.cfg +++ b/dspace/config/clarin-dspace.cfg @@ -301,3 +301,8 @@ autocomplete.custom.separator.solr-dataProvider_ac = \\|\\|\\| autocomplete.custom.separator.solr-dctype_ac = \\|\\|\\| autocomplete.custom.separator.solr-author_ac = \\|\\|\\| autocomplete.custom.allowed = solr-author_ac,solr-publisher_ac,solr-dataProvider_ac,solr-dctype_ac,solr-subject_ac,solr-handle_title_ac,json_static-iso_langs.json + +##### METADATA EDIT ##### 
+#### name || id +### these collections allow submitters to edit metadata of their items +#allow.edit.metadata = \ No newline at end of file From 208eac3661f8deec246942d6abb771de2691b77a Mon Sep 17 00:00:00 2001 From: milanmajchrak Date: Tue, 4 Feb 2025 15:29:31 +0100 Subject: [PATCH 44/45] Do not merge flyway script with default licenses. --- ....6_2024.10.25__insert_default_licenses.sql | 202 ------------------ 1 file changed, 202 deletions(-) delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2024.10.25__insert_default_licenses.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2024.10.25__insert_default_licenses.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2024.10.25__insert_default_licenses.sql deleted file mode 100644 index 301cdecf88af..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2024.10.25__insert_default_licenses.sql +++ /dev/null @@ -1,202 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- --- Default License Definitions with Associated License Labels and Mappings --- NOTE: Do NOT use this file if your repository already contains licenses. 
--- - --- --- Data for Name: license_definition; Type: TABLE DATA; Schema: public; Owner: dspace --- - --- Insert data into tables only if the tables (license_definition, license_label, license_label_extended_mapping) --- are empty -DO $$ -BEGIN - -- Check if the 'license_definition' table is empty - PERFORM 1 FROM public.license_definition LIMIT 1; - IF NOT FOUND THEN - -- Check if the 'license_label' table is empty - PERFORM 1 FROM public.license_label LIMIT 1; - IF NOT FOUND THEN - -- Check if the 'license_label_extended_mapping' table is empty - PERFORM 1 FROM public.license_label_extended_mapping LIMIT 1; - IF NOT FOUND THEN - -- All three tables are empty, so insert data - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (1, 'GNU General Public Licence, version 3', 'http://opensource.org/licenses/GPL-3.0', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (2, 'GNU General Public License, version 2', 'http://www.gnu.org/licenses/gpl-2.0.html', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (3, 'The MIT License (MIT)', 'http://opensource.org/licenses/mit-license.php', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (4, 'Artistic License 2.0', 'http://opensource.org/licenses/Artistic-2.0', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (5, 'Artistic License (Perl) 1.0', 'http://opensource.org/licenses/Artistic-Perl-1.0', NULL, NULL, NULL, 0, NULL); - 
INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (6, 'Attribution-NonCommercial-NoDerivs 3.0 Unported (CC BY-NC-ND 3.0)', 'http://creativecommons.org/licenses/by-nc-nd/3.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (7, 'BSD 2-Clause "Simplified" or "FreeBSD" license', 'http://opensource.org/licenses/BSD-2-Clause', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (8, 'BSD 3-Clause "New" or "Revised" license', 'http://opensource.org/licenses/BSD-3-Clause', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (9, 'Attribution-NonCommercial 3.0 Unported (CC BY-NC 3.0)', 'http://creativecommons.org/licenses/by-nc/3.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (10, 'Attribution-NonCommercial-ShareAlike 3.0 Unported (CC BY-NC-SA 3.0)', 'http://creativecommons.org/licenses/by-nc-sa/3.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (11, 'Attribution-NoDerivs 3.0 Unported (CC BY-ND 3.0)', 'http://creativecommons.org/licenses/by-nd/3.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (12, 'Attribution-ShareAlike 3.0 Unported (CC BY-SA 3.0)', 'http://creativecommons.org/licenses/by-sa/3.0/', 
NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (13, 'Creative Commons - Attribution 3.0 Unported (CC BY 3.0)', 'http://creativecommons.org/licenses/by/3.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (14, 'Public Domain Dedication (CC Zero)', 'http://creativecommons.org/publicdomain/zero/1.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (22, 'Apache License 2.0', 'http://opensource.org/licenses/Apache-2.0', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (24, 'Affero General Public License 1 (AGPL-1.0)', 'http://www.affero.org/oagpl.html', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (25, 'Affero General Public License 3 (AGPL-3.0)', 'http://opensource.org/licenses/AGPL-3.0', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (26, 'Common Development and Distribution License (CDDL-1.0)', 'http://opensource.org/licenses/CDDL-1.0', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (27, 'Eclipse Public License 1.0 (EPL-1.0)', 'http://opensource.org/licenses/EPL-1.0', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, 
definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (28, 'GNU General Public License 2 or later (GPL-2.0)', 'http://opensource.org/licenses/GPL-2.0', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (29, 'GNU Library or "Lesser" General Public License 2.1 (LGPL-2.1)', 'http://opensource.org/licenses/LGPL-2.1', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (30, 'GNU Library or "Lesser" General Public License 2.1 or later (LGPL-2.1)', 'http://opensource.org/licenses/LGPL-2.1', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (31, 'GNU Library or "Lesser" General Public License 3.0 (LGPL-3.0)', 'http://opensource.org/licenses/LGPL-3.0', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (32, 'Mozilla Public License 2.0', 'http://opensource.org/licenses/MPL-2.0', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (33, 'Open Data Commons Attribution License (ODC-By)', 'http://opendatacommons.org/licenses/by/summary/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (34, 'Open Data Commons Open Database License (ODbL)', 'http://opendatacommons.org/licenses/odbl/summary/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, 
definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (35, 'Open Data Commons Public Domain Dedication and License (PDDL)', 'http://opendatacommons.org/licenses/pddl/summary/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (36, 'Public Domain Mark (PD)', 'http://creativecommons.org/publicdomain/mark/1.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (37, 'Creative Commons - Attribution 4.0 International (CC BY 4.0)', 'http://creativecommons.org/licenses/by/4.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (38, 'Creative Commons - Attribution-ShareAlike 4.0 International (CC BY-SA 4.0)', 'http://creativecommons.org/licenses/by-sa/4.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (39, 'Creative Commons - Attribution-NoDerivatives 4.0 International (CC BY-ND 4.0)', 'http://creativecommons.org/licenses/by-nd/4.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (40, 'Creative Commons - Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)', 'http://creativecommons.org/licenses/by-nc/4.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (41, 'Creative Commons - Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0)', 
'http://creativecommons.org/licenses/by-nc-sa/4.0/', NULL, NULL, NULL, 0, NULL); - INSERT INTO public.license_definition (license_id, name, definition, user_registration_id, label_id, created_on, confirmation, required_info) VALUES (42, 'Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)', 'http://creativecommons.org/licenses/by-nc-nd/4.0/', NULL, NULL, NULL, 0, NULL); - - -- - -- Data for Name: license_label; Type: TABLE DATA; Schema: public; Owner: dspace - -- - - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (20, 'PUB', 'Publicly Available', NULL, false); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (21, 'ACA', 'Academic Use', '\x89504e470d0a1a0a0000000d494844520000003500000016080600000060ded4e8000000097048597300000b1300000b1301009a9c1800000e256943435050686f746f73686f70204943432070726f66696c65000078dad5976750135c97c74f0a49e848ef448a204d7a514120201d218054a524a199841002084a13055190a28822a0f4a248095d407a551101b1a0d81ebb8f05441031fbc17df77d6676f6c3ceec97fd7dfacf9973e79c39e7cedcfb0710ec0c6230a84821001a9dc524da5ae1bd7d7cf1d847c00302200e1ab02d8814cdb074757582ff91d5fb80000098d50a6230a8f0bf43804c89260120340120921c4da201208a01c09ac460b20010cf00e06e1c8bc1024036018028d3dbc71700390c00a2a17ff40300100dfea33f028028d383480040010008c4924259002821008c309d1c4e07e05606c09893c282c800826400d0a4d122c90082b900a016fc8fb3a1ffd07ffa040000314224359289772258e30941d4f06066108b4286ff6368d4987fd54300001f85eee90e006a00200b0488042a440213f0e00404b0063c102008a8100ec1c08420600105c82cca61160000219211cf0c0f0d63e12d190c2a45136f4f27696be2f5747475e0ff13de3ebef83f6a851f10008008fff5ef987f3c80411800fac8bf630c5e806b5f00a4ff91a7ee06205f0bc06e27c53063ff3563c46d803f77e53f412100900020053ed0899045e4226590eda840b402fa19572fa6015b822be6aee7b9c9fb806f4560aba0a7d0f12d23c23f444dc458e26d12eb5286d20c9956d955791d85087cf5d6792511650b95a06df9aa5d6a6fd44103af49d0f2d60edf71442747b742ef8a7eb941a361b35191f1
79933cd3c49de45d4ebbf5cca4ccc1fce59edb167d96b556058434eb4336ae7b0d6de5ecc0ee8dfdbc43a7e379a72467928bf53e255784eb73b71e62917b9c879be7764fcefe59af1aef241f575f65df6f7ec3fe670e781d5439f877407be0b1208760c1e07ba47cb23b4594f220e47ca8679848d854f8c9089b43e8437d54164d8bf6825e11e9cb10620c47c5330da221fa05eb564c53ecb938c66162bc5e825cc2fa914747d989f949b1c9c4148d545ceaeb63fd69178e479e704c974fff9171e7645126f594f96981d34fb2dab253cf38e588e72ce5d6e431f3cdcf22cf8e9dcb29f0382f797eb1b0e4c2818bf2179f16155ef22b162fbe579253ea7299f7f2f8958c3242d9eff2a60a5aa56ae55f55c5d50ed59f6a926b856acbeaf4eafaea9dea97ae1ebaba762dbd41b8a1ecfa8eebbd8d4e8d8f9b429a569bd35b445baeb0b5d983adfb5a9fb645b5437b4e8742474b27a173be2ba28bd37df686ea8dde1e9f9e95de937dca7d8337836ffeeebf326033f07e306768cfd0dbe1c211a751d4e8cdb1b471e284f2247272796aedb6e49da8e9040010001d2041157c47b820d8485d642fca1bcd831ee72ac4c46129b8406e12cf11de5cbeebfc0f049142c65b42850b44a6c470e2161234c9cb5213d2ebb29a72eef2690a2df8fb5b7f2a892a6ba9386fa3aa26aae56caf52efd118d27ca2f546fbf98e4f3a3f75d7f556f4970dbe1bae1abd327e6572df747467c7aedadd4566d9e647f7845bb85a1a58491250844fd673361d7b2fd9a6d985da5b3b6c73e473fce034ea5ceb92becfdfd5c84dc8ed3d71dcbdd423dad369bffcfe2f5e23de053e145f633facdfacff9503f483bb02b8031e06d6063182f790b849b3e4524a68884ec846e870d8e970cf08c5885787d8d42334029d877e37f202831c65c294894647bf654dc4d4c69e8ca31df688374a9048583e327fb42fb134e94872400a2155e518fad8cbb4a9e3d527b2d21919ee270d33c533574fdd3fdd91559a9d7ce6608e592e3e0f97f7317ffa6ccbb97305acf33e859617142f222ffe553479a9a5b8b824a1d4ffb2c315d532c1b2f5f2c58abecacaaaeceaf81a4aad439d563db6fef6d5d3d7ec1bd00d5dd7631ab51bdf375535fbb708b64cb2d35a4d5a3fb655b47b77f0740c74c674a9773dedceba6171e3474f632fa54fb26ff6667abf59ffca40fd60e090ccd083e1f323c451fed1a9b133e36e1312137393c553945b5ab7566f0fddc99b0ebe6b3cc333f3f45eebec99b943f3f6f79517300bef1ecc3cbcf048efbff65f03eb0877441752035987d2438da0c95cfc5cc39864ac258e07f79cbb87a78837858fce4f16f017dc2fe4b98528ec2d724034482c423c5e2243324faa44ba45664076546e41febdc257fc9a22
42895719a722bc4d50954f4d7a3bb73a5a03a1b1acf9516b517b6247bb4e896e8a1e459f60a06d88337c61d46f5c6672dc94bcd36a97ccaeefbbe7cddacdcfeda15bd8596a5821ac16093dd6c536717b5d6cb5ec78ed5edbf73b5c746438ed7596755e7599da77c5f5b09b035199b8ea3ee151ea19b5dfca4bd8eb9537db27c5d7dd4fceef8d7feb81d4836e010a016f039b82e2836d4882a487e40a0a354437e47b68575872b84b8448c4c2a1126a384d9df685de1999c4708a128e7ac4ac8aa6b3cc63b863e662cbe3e8874de2d1f1330925476847cd12f9131793ae2627a410531553578e8da7151f8f3d619b2e9bbe9271eb647966fc29e269f52c64d6a36cf699829cc85cbb3c7cde7afefdb39de70a0a98e71d0a552fa02f2c5dec2d2abd1453ec5562582a7a19ae20cbf8ca852b442b25aa64aa156a146a65ea24ebc5ae8a5e136910bd2ed628d924d32cd322c696685568536bd7ead0ea34e8b2ea76bb11d413dd9bde577273ac7f6960634879d8612461b479ecdd84f264e054f9adb53beed34d3342f7e26697e73317441eb01f452dea3fd95cda78817e65fbfaeabb9d1f777ece5eee5c6fe47000fefc1900003046001702007ce400dc6c01b276036c1b0490e00170e507f03005a4b62820be0f02c22ae71fefc74ef085a3500203f0128146a8219c100cc4394417620989416a233d9189c82ae45de43a4a09e5864a44d5a31ea27168137418ba087d870bc565c245e3aae07a8c11c3b8624e63c6b038ac2df604760cc78fdb8f2bc2bde4d6e266710ff388f19079d8bc58de00de4e3e613e1adf38bf0aff69fe65012f816e4115c112213ea123425fb6d0b77c12a6097f116189ac8ba689f189e58a4b88574a684b744b3a4a2e49c54af34857ca58ca3c9465c889c875c9fbc8ff52a8c4efc3ffde5aaf7840495869443951c544e5c7b62ed564b5bddb79b73f54afd488d774d052d3e6d25eda31a253ab7b4a2f4e9f62e06fe86eb4cfd8c664afa9fd4e975d81bbc3cde2cc8fed29b3e8b25cb4fadb5ad8c6746fa86dbe5dbd7d9543ae63aa538233c325765f946b941b8548778ff448f6ccda9fed55e87ddd67d077c6efb53fe7a07c8059a057d0b1e072d2040515a21b4a0e2b0a9f3d04542bda49fa5d864c5438b32dfa774c606cfb61fe787ac2dda3868915c95b5292523969d4e39fd269191f33c34e7dce6264afe51ccf53cb6f39e758305f9874d1f6924709f5f289b28e8ae96a74ad497dd4b5aaeb2f9ba5d8216da51d6fba77f744f74d0ff00f058d748c23267d6e5dbef361c66c3673fec603e1476e8ba94f879f7d7b29f197dd1be2bbd80f499f123fc77cf55d565879bd7a714d73bd6683fb1771f3e0ef140e0700742104cec3087c45c823ec112cc465c4246215
89473a21139035c839140aa5870a42e5a34650eb682d7420fa3cfa361796cb8a2b91ab87eb27c614138fe9c502d61a9b89bd8793c585e13ab9b9b97db9af72ffe6f1e269e6e5e5a5f00ef0a9f0a5f2bde077e4ef16c00b6408ac0a860b3e1172131ad9b2734bafb0a1709bc82e9101513bd119313fb1d7e231125889524913c9fb520c695ee97a19679915d9223973b90ff2a50a8e0a6bf8a6ad118a0a8a8f95ca94835554553e6e6b524d51dbb75d6efb0ff53b1ad59a27b542b46d77e8e848eb627457f45eeadf331832ec376a35669bb04d5b7676edeadb3d6a3667fe6ccf7b4bb4950041d5dadcc67f6f826da15d837d8b43bde355a746e72e97b17d33ae0b6e4bc4af1e084ff9fd5a5e26de9e3e2cdf4cbf72ff9b079e076002e5831c832348d964366531141b66141e1271f1d020f51b5d3d92c42889ba1d0dac5d31b1b1cd71dfe24d12a28e341cfd91b427393d65ea187f9acbf1b213df32b44f9eca5c382d9fc5cc9ecac1e746e70d9e153c175c3058b8f5425191c5a50f25459703cac4ca672bb3aa9d6b95ebde5c6d6cc86a0c6c3666cbb6aeb42f767675b7f694f4a5f69307fd86778f1a8ce327456ef1df5e9b5e9a19996d9faf5d38fd90fad8e189f992f8b36f2fe65e35bc3ef676ff7bf90f1f3eddf89cf1d57959606572b560cdf927ff46cf269dc30100538883365841e823a2102d8815a421f230b2178544d9a2b251f7d10a682aba930bc3e5c555c7b58171c1546138586f6c134e08178dbbc76dc05dc283e561f22cf17af00ef399f175f26bf2b3050c043a0409825342078596b79c12d614be25122b2a293a299628ae23fe5ca2509228252ab5205d20e323ab24fb4dae5b3e4f8184d7dfcabbf5a3e2a852a572864ae4363f552735f3eddaea1a1a8a9a4a5af2da6a3bb4748c75edf4fcf4230d720c6b8ca68d7f982aecb4dd15bdbbc1ecf51e790b7fcb4b564bd69a36c97be7eca4ed590eb34e2ace675c3eba12ddaeb94b79247abef5b2f7eef255f4cb3f803b9810b01c1412fc88ec4e990e7509bb19617da88f66416f675844f547ef618dc5bac4cdc60724bc3f1a9fb8969c912a73ace5b8ed89e90cd2c9f553c5597ad97339cc3cbefcee73c482cdc2e28bc245b1971e97b8960e5c312aabab90af2cafde56535fa75dcfbe66dcd0d368d134d262cf7edec66c5fed4ce8dabc71b657a5afb65f7de0da90f670e3a8ced8b50981c9ac5bbcb799775ede759b699a1598a3ce4f2ca83c487978ffb1e4a2f7930b4f179e893d27bec87879fdd5d05f8f5f73de8abd537aaff861eb47854f727f4bfdbdf9f9e997beaf25dfc296759797579abfd356f1abf33f62d6f8d62ad70dd66ffe74fc39b961ba51fb4be2d7d15fe39b629bc19bd737377edbfecefdfd90a3c4
a172da391c80e8107dbd3f0e434a0e80e70b87f36e1f8000016093c4e1ac9971381b2f0130750067c3fff85000008c1040712600c05073ed7ff341ff017c3ece4feba5845a000000206348524d0000592b0000595f0000deb900008399000070e20000c0e400002f4b00001978d02b64290000042c4944415478dad498594c5c5518c77ff7323bcb40ca32436929ed80d2c5071393425994362969215553496c6dd3343e92b8251a1f348d892f7d50138dc697be9214ab95124b09d0326030b46a84a235b294916518d619665866e6dee3c3c4a1bcc89c312afe93efe5de73fff7ffffbe73cff9ee518410c431d5e1c2ebaec2ebae26f47b01db1da9bb26705475e3a872937f6cf8cfcb4adcd48da79a3105d3c82daac55909f6d26def09ff2f30dd03beb136c26941eaefbeb061aaf3b90fc9010a2b5e657d00d60721eaddfea60c0e301f02f31330defb11b3c0d1af5e4bb9f4d2be2a166fd4b337ff3cfe26088f821604c1f60f2d18d3bbd20759aec3cc3c9843a44f2aa2bdee630ad31ba13bf10c897fb11a8acce06a185ffec44060d8852117c2124f0b79570b0195cc3481aa4a3eab48e8324d42c0e732a0874dac79404f2e7da7dfcee6e1b4814b2ffba9ab58dd346a3da2f0d9b534ae77db08ae2a988c507b789537ce2c936addfcc2bfe24918ba0774d5a402108d82ae241e1aa0c1d0b09187d30654155afa7782b5207e0f0ddebf62a7a9dd464db985cbefeca1fe780e2d3d563eb8b61b8431311e195dd1686cfd88994a6ea2b7f6da301be1d993b95cfddac784bf80022601f0cc18f8a6cf426d958577df2a85e014358fcde1cc4ca5b8d4099919303bb8254f3288554a261bba023a44c270abdf4cf993664e1dcf420868b91385141be830369502c0c1833910f2826f00825e2e548c72649f17966712e291d6163715950c4da1e7272b81159563cfe451e20851e810b4b6cfa35b0b405358f0c74cd96c29b0ba049ab211f3a3b03c9b108fb4b6f8f4138ae40709ad7d1600aedf5aa4ad2dc4ea9a8e6f29c2772376caed2a0e7b6c21989b8f40448b09047e183651bc3342ba35b60a6ec58322929c7e8f663181585852f8f6672316b3c2c8d82a43630a51618c755b1dcb60cda6c419c56880ce9e45a2584083f92595c64fed9cb99c8d1086847864b56d544a97cb44db3d2b9aaef0deeb851c7f3a13740d80b38dbfd1dde72770c1c90edb1ca78f4468ea5ee1f9378dd41e4aa36bc044380aa74e3a515c0768fbbc7f4b9e0c7d2ec94a45e4a2f59e05b3112acb32c1330483b761f03627f6cf130eebdcbc9b02aa91576afd9cafb3b214d0b9d2610583898b0d3bb878361fe62712e391d4166b68afeeed244baf914a8500f69781d5063fde89671880bc3db0ab043c0fc0e70145
41e417b3647192956586b5102ccec0d448ac33d98ae7fb76b9322daa5d8af8e2f19ba4aed722ed2a09188d1089fc93cd1f84cc6d06325cc3847f0555935afd9282169517a94a8cd75320c335ac52d4d04c980ea9bd407ab3fe1b21a32b4c07450dcd065ce7dc8c7f791fff808a4a0dff57e874613f701fd739f723bff3654daccde6a2881aa976ffbf861020942e2c393eeafb5edc7c460130d5e962a6a702afbb9ae0f8ee6d6f28add083a3aa9bbcca5ef28fc60f5efe180026b7b96a2d80c8d60000000049454e44ae426082', false); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (22, 'RES', 'Restricted Use', NULL, false); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (23, 'BY', 'Attribution Required', '\x89504e470d0a1a0a0000000d4948445200000040000000400806000000aa6971de0000001974455874536f6674776172650041646f626520496d616765526561647971c9653c000004374944415478daec5bd171e23010154cfe4d07d081e9c0a402e800ae024a800e4c07d001e9c04e05261598abc05001a7e7d1ddf88824af644931096f66c70cd842fbb45aed4aeb17e617632e332e532113213a9c85e48deb6ff64088b9a45c4a2e3747528a36e33e2bbee45238545a2585f8af5e295e06505c66155f4a04e677f6058acb2c22f8d458f740f17bd9d8283230bc3fe272e0b2203f10456c369bb1e9745a5f2793492d52f77f3ed7723a9d589ee7b55caf5793fee5a26f571fa31e9938b9e572793b1e8fb7ae401b688b1369e21b9c4f093458513ab0d96c6e6559de5ca3aaaaba6d2211954b1248cacfe7732f8acb8880458422216a5be230222e4cdd14599651aca11309ad733e8ee35b5114c69ddfeff7b5c52449520b3ee33b1b6b401f083e21b22160dfa63c3a600a9df9e2371b10a6446613dd39571ed6d236776d2c8a48c2c624c2ab5c2b0fc08b5356115b1048f8e40f861202765c465266c6e33a38198d46bdccc60e8703e303a4bda58d804417e5bdbdbdf556f97fa1201f20449f0a4cef13a87b02b6aa27d334adc3d92ea090d795603c8f81d240a963a29a3b58aa5c0531bab51bbfd9fa17437f204da38fae3db32ab69791e03aa06a21bb90797ea76b332592e3d3aaf6faf8ec032dab4e4ccaef43c4f73ef3060d01699380c2c7dcc7d4314863a5d3a2ebf4d3f882b219f34b6fb289cf9b408cdf75a7076d74f5399af6c743b16f2fc562b1e8b4245d2e97ceeb7ad736a083262e980d4570f0794d4c92de073d54602b4e8189d202340f3d1c34015c6d01a3ef4e409b0548e951eddc3e2234ba4c8
6160f7d2702d890fd703c097812f024e049801438a4fc2ed0e902024e3f9880f30bf20dd5e662d76870b7dbfd97ccbcbebe929ecbb2ccd91e2180e3761d01b92c1f00011e6370dbd0d53501271448ccb948b751793aedb4238301ad1ec3f5ffc28a148516aba1b0002970d0f0e8c0e86baa4c7210705539c296fdf58700fc906afeb3460166904d5146dc060bb4359e36e300e5506fb7db871e7d8df97f9adfde0f46425a40cbe8973246bc1f8d852460bd5e1b1f8dd53108f3706e1f9a009c34317dc98c1209f350bd119280b6035846a82f56d606a1e12eab826f0208455305c579e214c14b898c6f026c4a645498330f4552be0820164f1a1753a76d24984e071f04106b059565726dd9096203e50121cedc102e53b337ea7dd44c1471fe6ab5621f1f1fdadb44b66b55414eaa1077b1449a02c515be4b658d488019faaaf4b8af394060c602578c93df15c079becb9aa2bf80bf2156897b7b8d26d2e50bb2f01905165daabef02cda208e7853797271f4c082082c27462922aa37e10021d89d513943383fec21fe7d65e6fdfdddb46fc8f07e85c83413467c832490542276098aa82d5608244766f93e802bc4ec6bde1fcc8425f606898993ec38e2bd52fc1e63b1c7e8f23de242b43976ddd941003f01973f15d7115394e4dc85ae17b15d7f12d7abaf0efe1160005144fe5e042ce31f0000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (24, 'SA', 'Share Alike', 
'\x89504e470d0a1a0a0000000d4948445200000040000000400806000000aa6971de0000001974455874536f6674776172650041646f626520496d616765526561647971c9653c000004ab4944415478dadc5b8d75a24010461bc00eb403ed403bd00e4c075e07da81d781d7014905980af02a402b502be0f6e3418ee4606766990572f3de2426c2b2f3b333dfcc2ea3c02f4d0d2f0a5e199e156ca38be187e173f1197c0bbe1141e8a3e1c470a6c44931e67cc8826f0dc78a42db94b11d92e04bc36907827fe5b46f454c3bb238c5711f4b039abf0f40f82aef5d041909af0f0dff34fcc2be210c83d56a152c168bfcf76c36cbb98eaed76bce97cb25389fcf393f9f4fc9fc903936869f3eac1e4a22fb76bbcda228cada12c6c0584691922039f7b1de13aee0699a66da74bfdfb3fd7ecf55c45d53092167bd2f974b2f82d729024aee4a09a4dbc322a7d329eb9ae238e678436b2558859f4ea7599224595f046f98cfe79c9810ba087fb40d8c0763027d137349c452e1d7df41f82a3194b057097a43145ea004563c38d9029eabf0c810c7e3315bafd7b5eb16ffc3774875ae71851113c8a5b0b469d0656288d6489152688b00eb925da004223b580ba8c6e20696914e64b7dbb5c6f850049428553a5145caac0fb7f2909e442c3500110f6abd206aba4162011fc25761b6d25248eab07eeb878210c87c96bd124fc0b5dc8cb06bba5082ef11b4baa8fdb91e4978c19184bc88de1242c0a2268f09d5a53afccdadf6f01c6e3ab6c482b40a7c6a2f92a4218ef5b13ca8897363087729a09f6019676a85bd12d043ad7d098264e4f2fc7b2e59c6cab3c15ec3fd298b49bb431c8fe28e6931cebe11fc48a22dd6af96b52431859ba100c19ba0f1d8fc98d481023430293a1c0ef9752f2f748f14d76d361b51494a5d8fa62987d0906da0bc3beb9cfe24a0075e20ad258800c68e5390c53246fd179ac8cf457806a61761026f0aa094e02a3c37b80e42014d4a682b3c4701dc60dd74ff58ab773e994cf2a06494f0b12384bf2d016830a4e201554f0086d0ea16531e8014378825e083287ca111031a97003629fb26ad39d8c61917677006a9000ed0e100368b2cd7717120c91965958870341a3532679275f4fafa6afdbe0cb81461bbdda680b3f0a67ff1e46ce63a01abf56fb75b6beb13c6bc58cb6149cf5f2b5895c481d8dc4c4395c3a146094b556e927e0067fb9b5bae1399646a6d89a18ee692a5e4fca4045b9125d8fb677b14a725a6d214e57471aa753c1406214ac6260aa7a728318ca429aad216e77881c64e11772949dae2d68d1109ac75d907e4b2a4b822ac9f8ab6c624fd415f3b43d2ca92d897dc8a3747b94547b520d2129e0a9e5a9ba3eadbe3509ae07c5fe3264a97dbe3e
a0724caf37ddc085f063adce3f22c62f9259ca3b20046d7a66e31f037a0251a20522a8fc096c761abcd14344e00a9cbe3b42e842ef2dbdb9bb5416cf8f77f77488a09a0c487a98f9a81a9e7b38271e0481115a4a4458e76c788217cea7a50927d425c1aa935889961ba392f5c2e892ebc015667620cf513e309b750f1717e18f1865b296a0b5f5542c4cde5b012b6b8db640bdc8b3184a832a9d6f9daafcc94e7090e921b801d90dfc1c8f94dad2c6084c7e3f18117dedfdfa573fb65f887af5766be42e621bd34752f7a1a9d524861858e3892b8bc2f6fe8e3fdc1b878f660682909922d2d3e28c1eb4e9dee02fd97a7773e5c7dd4419c58057f5f9f9f149fad4563f0f9f5f9b3cfa8fe478001008252cd9d372775a60000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (25, 'NC', 'Noncommercial', '\x89504e470d0a1a0a0000000d4948445200000040000000400806000000aa6971de0000001974455874536f6674776172650041646f626520496d616765526561647971c9653c000005024944415478dad45b3d76da401016760a77a24c876f20a5730737801b403a3a8e00398192130027c0a53bf009049d3be113002750f6535679b2bcff3f923cefcdc37991969dd1ce37df8c865ee0570644474463aa8f544572a67aa87cbe075f4822a209d18c68ee4833ba66d465c3a744538746f334a5dfd529c3b3060c679d8a561d81f8deb76038eb441887c6bde17d0ba22f0a80c695300c83a7a7a7e0f1f1df12b7dbcd74a9ef44e7f4ef57dd9b7bbafb26ba213ad13174329904711c173a1a8d84d71f0e87e07c3e179fd0f777ad0470a459e7e6e3c887aa20478ccea7d3699ea6696e2b5863b158146b2a86c4c547b688e8c252c397cb657eb95c72d78235b1b6a2239c3a41c9f8f178ecc570962370229a7242284b717822bbdd2e6f5af6fb7d3e180cbc3a411af35114e55996e56d094ec37038547142689205d64467dcb888a202a5fbfdbe91774b942f65369bfd4f89ba827bb7dbad2c3bfcd06577c2276f1bef00b4ea9a38d23682ac2339094b96a1771c86f7dbd793f7259bcd26204e105db262e101cb01309e691d019d4e1a5f7502c104e12532070c792c0f8ceef9f9b9b3c697823de2417124ae1750f70c0f3191683e9f17606323a0b8d820144fabcaffafd76bf0f6f6565c03279b3afae1e1a1a0dc02508413feb0fe6328231700bff57a6d4467419474aa3cd3ef2a4542969860b153dd1c08882ae5851136e52ebecb2443606f02da9cb2905f7b7365d1c32343b6c657d58470d5536d4da37a7d6fb54138a2faa4b06157c69b8682e41424550738ebe5819a62
c30ac4c4abf10a0429ab727e2e10f9686321ae61581543707aea753f36efa29f20d84b912fc7bcf82e8f3236a2d190b08ae7b2dc85ba12c1de8b6cb0e4d5f7ac868442092ad5a6459082933bda43fbcc16e2f8c3bf414c56ab554154c8f115b12da9606d10a1a644d0878cb900a8927b718d423d2ec4822449bc7792b04f11105ae7dd12274c1d811845ccfb6aae4852b2bb38d56c5c72f9848b6e725d1a71409d05da002642cb65afb17107b8c4893a67f8520ea8e384697894ef1c4c71a27507b48d139d71405b38a1ed8026fbfdc009dd86890e4ec8d2a031116a90b058e1848c0831871cb0501be2aa02ade204d826e7ba3db718427cb98a739d1729ae4b70d82148c309b71c764586aac753d6dc9085800d4e88cae1d0473786d70f2c0ba02ac6e0a8ca5223ee735177b01a225c20acf7046c8db7d13a2639e01399525354371d4a5a50460a5cf0c02712a5b6b8495f4e80ba46a94d95f569d61d91f28b1193f21427c7a608b219c0c03d12c0ccb45e8dd9a444139657323b1b3179351688263f5d10233803eb40eba703a186d071d10c91a4d34c361196bb0c85a6264414df067d7afaf5f98093683a04dd557485bb2a78c58e3d0ac66e312bb4954d88609484692516c6d82bbea8abc69f4e27e13c95ca88cc4d341d862fc01775c9098ac6afe809ff20df3817bfd21b56222760d2c374b4adfeb2c2741d45e3318ff7cb64fdb58ca4b4d137a8b24e0506988a06259d4c88b7d13b50649bcee685a54e301d65f1d866773a31aefc5b016cce872370dc35cae02cf0f40bb3b50e8fc731b569ae82d480126bd6145a31df3370c292971d44e3b5406a203d5e8de3b38efa2058a51e8fc762225582ecbc49d09f4da4de61d0ce4fe544f13e6e9a7f84b4a9d0b6f13b9b34e742a2a09ddf0feee949ec8c0c75a64d2d9f78a70c674d9d2e02b7bf234ee99a03d79bed358013233a8c84cf7e3998241094ac57cadf8ff4f3e66b837f051800dd4b1284b44b8df00000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (26, 'ReD', 'Redeposit Modified', NULL, true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (27, 'ND', 'No Derivative Works', 
'\x89504e470d0a1a0a0000000d4948445200000040000000400806000000aa6971de0000001974455874536f6674776172650041646f626520496d616765526561647971c9653c000003bd4944415478daec5be191b230100dcefdc70eb003b502ec403bd00eb403ed403bd00e382b002b800ef0ab00ad802f8f89339e9784040910bc37b3a33348645f369bcdeee210b3f0a84c98cca88c98c89050b95189d877c83f6211a0f49e4a4c25af496236e6b8cb8a2fa984352a2d2363d925c57d2a69038abf4ada36115e43335e26611b4b03cc671d50fe59b6551471347fef5239505929dfe0ba64369b91c964527c8e46a34278b85eaf85244942a2282ae47ebfeb3c1f768e0595bb895977753cfb72b9cc8320c8df05c6c05894481d273936b1de6355c5d334cdeb469665f976bb552522ab93045765bdfbbe6f44711e1120b929124acd1e33723c1ef3a61186a18a35bc4d825479cff3f2388ef3b6006b188fc72a3ec1ada2fc5e3630fe180fd036149744a8abfcdc06e59fa140c2b616a7d745e5354850f2074799c3ebaaf28a3ea17429f83206db74783a2494ec0ed20394f0708320c416608b2c3945eacd3eccca3694f803ae1504a21bc0a86d28590a312fd617c6f6b602cb567547588b7ed8447cdf921520d023034600f77c4f0f38c2b3bb0d180e8764b158882e2f1e04b82c6dfd0babd58ad80e09019859cf6161ef37ef17d4840a1675806cce66b331a6d0e17028b24bba9620c82cadbe44b30ff3d7551eb8dd6ee472b9182300e3eb02a9b8f3f9ccb58201abd8706fea0b24bacc40c0b0ef0448964c6101dcab367b7f0d5d46830a37f58980a22e90f32ed03882f4098ec32f810cc887e38f804f27e0abee0151db3b9d4ec61e18e179dd0ebad653604936e66da9929b802ea2f1b00412d14cf605125dae20801b5ca334dd17e080262320d2bcc93a482633112e01c1e9a97704382c21c25d064110c8120ac2e3aa49ebc1c146e7988e67994ea7b2a448016e15783e9fe7b643921eff511ff8f8a4e8b7c84676bb9db56b1fe9334993d5af684d5818b1a126a839fbdcf298b03486fe1fdbb05eaf2b154885c5d1fd7edffbe2a8d40ac88794c7813e3748c42aced3da1619c42da4861619c0aa2629c54e31ed66ead236b92e04498abd8261d5582220255da26d364fc0292b289f928a8d920f7f50da24dd46ff10b6e5265a659549c04c34610d98750466a4858e71a57679786213f102fc8d629778edca3f9310a8262f314be8227f67b7c0bd184371c69ff77a4fb9625481086c275a4744ba3c8a6a3304c90c51e519999b474205df2bf419e08487ee0c23afccbc86cc5d7a692a63398d46e196c50a0d49a063f2a6aca18df70743f6df9d81afe324df9cf14e29fe0a8fadc7ba5
f9e5e9b3075a7013f0197ff787d7e48042d39cf996cf2f3f5f9c8a457ff2fc0009a2c4cd8368e16c80000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (28, 'Inf', 'Inform Before Use', NULL, true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (29, 'CC', 'Distributed under Creative Commons', '\x89504e470d0a1a0a0000000d4948445200000040000000400806000000aa6971de0000001974455874536f6674776172650041646f626520496d616765526561647971c9653c000004404944415478daec5b8b919b3010c5d700d701ee00776077e074e074e04905a403d2814ba004ec0a7007beabc07605448f810bc348bb2b217ee7eccc4eee2e42b0ff9fb40a868548e14ee1a6c6758d145c153e149eeb9f819fc1822056982abc292c3d6151ef19cd99f083c2dc23d126cceb77cd06f69ea52dc5dbd48c884692b84423e229d4fd3e03e2db98b810b2b25c1f2afca3f0a7f881300c76bb5db0d96caa7fd7eb75853af8f8f8a8f07abd06e7f3b9c2e7f369f37d881c3f143e87907a587b6391440e8743996559d917b007f6b2f40df124c42b6997499294f7fbbdf40dd8137be31d0226dc7d324144fc7ebf2f6fb75b39348011428df0c204967848c487aadb429ee7126de8cd0432ccc5713c8ad4296dc037087c42e8427cca113f84adbb80c0247297ec6e11c45b3021b1b1fbfb9288b76042dc4bf5a3289a2df10d303e81358588e2605114e5dc010262a2c3c1c9eb2309590a20443251c14efa50aba501e30fb45a70323d008e2e0d909f10a690eb3cbf76f176bb2d970a305b420ba26e7dff6da4dfd6028201c73603b2beb60fef9ba669a531baf08962495a3380e9b0613ca7d34848569a8213bea0683340bbe8743a895e8275c232b562aa299c82893a06f6894c603ab14755236c4d0b24498f65b3e2ab82ec3201bf4b99d86dbc70403c8f94bfca919d9c1f54deb58707621b3506a375ea2ec5e3f1487e27cc8faa0f3217f5621c4cf552ec818f3349166b005843a5dfd807489907e51388685085436dc383735826d5d7a977b76e6fb7ce8036ea0d7fd3d5542e5211996111b8e6fd26a99a1c67a3e6dd9e6197a0b6e429894a08176aabfe3f2800735c9eb3d124f8179f60fade379756d1e3f1d0fe5d49c57a2fcc01748039c218f016cc14dedfdf5f9b01262d7b19068c056f3ed5f372b99092d3d9bb692fcc055d7c870b780d83540285d085f0d60e95a66c92ea3f36711d114452147161d07b22a48bcfdd6c0c04e21dd4c7e912215dcd807554ddc2254283a6c240539edfe
4f054418567b10efb504ca73481a85972633124e905301d17b62c6ea4c6b4af58e4ca7682c1c924e5b06eb8e25a0e4b7a16c4be7b2f0d11a898f4e3297b0513a40d119886a416a0d2f6765f70f09698d463732d31f816a9606c5a62dfb229ca4c89d2ff6df1ef3c1861a45f98cef85a3727e60a549b8d1a90bef47094d482a59882a0c3cc9e2f4e6dfaf90b3b2051fc3f222384ed2b1f926a20719df18d6df382f439776d9664dc786b4ac70801080e4a16ae0725c5e7841173c73609e15c729cf3c2ed0ecf18315e20f5694e8c37b5c3108c00e116670706bb3390491b15cd1cb0cf616a3c0b55b71c9d5bd9fcca8111880ebfad6e5745d1d7b519205ae1ddd157fbba4c7365e6f3d3fabe24aef3fc1a639eb00da6b92a47d9fb7eeca14ac81da91f09d33e61ce074436bec12366c1ccaed2465453c5a3aa9f8299df210eeb9233f32cedc310aabe1ac961ee827fd7e7b9930f988a62c27aad7fbe0cf9717f051800f651ef6054e44bf60000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (30, 'ZERO', 'No Copyright', '\x89504e470d0a1a0a0000000d4948445200000040000000400806000000aa6971de0000001974455874536f6674776172650041646f626520496d616765526561647971c9653c000004704944415478daec5b8d71ea300c362c001b840dc206b0016f03d880be09e04d001bd04e001b0426081b00130426c8f39726776d706cc9719cf44777baf6dac489e44f3f96949e689602c953c9e39c4739ebe82cf92ef998ff0ebe892f44a1e48de48be4d411c7f99a4197059fe72f9a36cc71feacced0ccf16e53f9d2b62200c7a805c1cb1ce566e7959692930e08ff91573682f498d70f246f252fc8370c06623a9d663c1e8fc56834ca5845f7fb5d9ccfe78c8fc763c68fc783f37e881c7f243f9ad8f501c7c9cde7f374bfdfa775298ee36c2da9488e6f085b111e2fb95aadd2244952d78435b1365111894b2590849fcd66e9e572499b26280288f0a504a3f0d8111750e75214451434d4568236cc8561e865d77568984c2614250c6c84df98846fc2d66d88601231370c22bb3b5426fb619885a8e170c8d66a11e6aed7eba7bf2334224c826d68b15888b7b737dd256bc9ffa8769fb8dcf9c283074160745eb8c63692109010d6823e5e8efb62705614c155cfc2bd5c32f884887294ad5c0049098776bb5ded14176b70d166880e732baf0f58fa16de5609408e2153e4ed3eec9e43088d8cd435a56498dc706bf0074a14ecaa6ee0da222136b3196b3a348548e5f92b535c87f0fbb4ab100a4c450b772360b69af5827239cbc9432979fa66b3b1f217589b6
b8ad4fac1de15ecb8c27351c30dc39a0df9941d3af1be38149962bb2d6a0ae61ebc0cef949d1126551770b5bd5c2ed9e1cc00d327c633b8a4f131b37edeb87822097f76ae8f1c5f47288b95e9e5e5c5e933a8cfcd695c69ffdcc407c4853fd5f6cbcc254d3488fa55ad2a8dd62a0b9a3a521542d7ebf573ad3d0832f4d57916439661bf8001e5855dc2ff703888d3e9f474ddebebabf148cc35038d2ce3bec54d4e4865fbd87928caa6ce60a900d16fa3a5845dbedd6ecabffb26ef0a80fdaa765f86b7c651d709056cb7dba76e0fba472a87f8ed1480dd8702544a716df79d5400a05fde7d145851d06c8bfa2e332e53e852556d55886802793a8a5d1c3a2827416aadc15450b129cc569d088180ab0b04706d188eaf6af7cb3d83bacfd2c872870294ff45e3834ba614b6ec0f54610fc2ab72049b67101470ece743054f8434959b7353bb3ac8f7ab4e8126c5db748e346b9eb5f540d70511cabaa6260ad737a19741a90bee5d94c3a98eb08a502e33dddb5449cc6b515455dca408cf2d8a1a4ae31b52599c5b18fdaa65f19fd61889bf7d6bcc50a09dff36476ddae35c53f88aed71ed800416e6ce08f81e9080b90a8b59a11f3b22f33b2465630a0512b8e6d0d4aca001f6a4d9207654102d4e897eccf30966150bcb41c9c21f18e78411737d0f4d1ab23cbff3c285f3f28106440602e4db99182f52d6261401c119297663df0c449c388e135e9d616adc8b35987904cbe67b168a40746035f551012a3e9901237c96c31d4263f993195d69acaaef22f9af8f72fa4474eba3a924cf5dbcd2c0942b78e24d9d30e78242d1cef7833bd1b14f6927baa28a43a8774e7095692c1da3629f1f639d43bde7411953f13e86839f43513192f3b18c2fde3f9f3fe7bf9f9a7cc1ff020c00b8b38128c9ee68580000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (31, 'GPLv3', 'GNU General Public License, version 3.0', 
'\x89504e470d0a1a0a0000000d4948445200000020000000200804000000d973b27f00000002624b47440000aa8d2332000000097048597300000dd700000dd70142289b78000000097670416700000020000000200087fa9c9d000004e54944415448c77dd5d96fd4e715c6f1cffc663cf6d8c62bdb00b62103b6716d106002c62d04255237acd2566aa4a4adaaaa7f01bd6f2e2ab554b9897a59a98bbaa4a9aa4895825a2ab2101a08e0408bcd6a8cc0061b30d878b03d335edf5ec47128697bee5ee93ccf7b2ecef93e319fad948c1d766b55af0c53065d74da3937e49f6e8e3df54edae280176c2ca95a96ac88a550900d9333f971fdde76448f99ff6dd0e045df4936ae4feeb04ba3554a91735f9fb33e726b66a6cfeffcc9c07f995b64bbd763d94c38144e848761213c59f3e16138110e854c8865fdd176d167e5fbbc9b9a3b108e85a9104208e3e166980e21845008f7c26c082184c9702c7485d49c77edfbc422be68b0cd4fcaf67e3b7e48ad01b7dcf077d76d95c4a85f199675cd800acf125dab9fdda0c7dd4f0d1abc92facacbf1970cba6741d68831ad9a4448b8e5b23c4a64ddd72219f5d5cf56f948f66383623f88beffa5d4cbee6ab6d99871092d3a9480847a299139799b640c5a2b1fdda80fe3bacdc7b1c38f36d6fdd0a45669c7957ad656eb8cba8f94bcac5a2d9a95f8a7159adcd2e272f168da194309a5ba8a377d5ddab08dde93b65791111f98526456ad017129e53ab549e8f6452b257ccdcf374d77b918d9e8f90dc903a6a4e58c6b5364c43155ba1cd4e18cb7e475a8f08e7119a5ee5b6ece01cf243d6f5364672cb3539359c50a22298f9d50a3d5843beea8d065c40519291f9a9534a3d88c26ed6219ed091da9aa5d6a149b9211f30f13c6f03783266c96f5c88c537a341bf15793b67a28526d9737ab721d91b665c94609351e48f902a62da8b5c6944885981171b3e28262cbecb4ca5d958a34aa48da12a9abb20a0d726eaab75ba443a7325f96f607539ed3ac47c16e9126ad068d6a14b35a257509e5a54a516dab0f4d7bac588b22cbcd98d4eff3f61b326f5cad3addeeb96a9b954829a53c211613434cab22175cf1821279efd9e0255b3ca34a95ef7acb294dceb8e29b368bf0b12e21972fcf2f9e458b6a8fad5570ce80061b3478c7512dda5df0a62679ab6416f73f2f4f2e61687ce5c8e245cdeb35eb9ae386bc68a531311fe8356ebd2e77641db47671c17920cb50c2a5c76dfd89fd62c8b9ec913b7698b14ea54bc61cb05ed6468d6af46b5e9207fdb2732ec5a5e73b57a69e5382b80ddae4b47b24618d65ae88f9aa6675fa1cb5c7ba25843df66bdde3e1b791b30b37bb5d5fbcbbb4b4245af51853a6dd4dbd265cd5a7a0ec0902f63b6be196e
ec835ef5f9f3ba2b044d56a431a553969da1abb5df5be1b46d4492fc90b8eb83ee7b8ab7133a6e7f68dd6ec54b748d9b80b1ad4396dcc6aab4dbbeca14a1dd62f1974fb99e19b5ed517c703d5a3edb9a27655609951d7345af08631ad92068df886e625920efaa9e3b9f04b6f988d63d670681cd81462db9522b2ca0391352eeab55acca0727b942eca47bde6f55078dbabee7ec2c45143d36d97d2d3b1269548aa57adcc8471a79d7756a7560970db6b7e11b2e7bce2fc93541e325c68ea4d0fc7d659212e2e6e415cb911ff92b4dc7a95669c77d8ef43f6bc1f7bd7c27f2643c27e47e3b96de170e809938b81321d2e87a3a127dc0e63a1271c0edb4222efa8fd8be33c156d9166dff3adf2bacdd11ebb64ac508c8231fd4e3be5cac2e46d7ff61b573efdfde9702db7c7417be375e5e53551ad32e48c1a5b98989cbfed84bf3865f2ffa533546bd3a9d3e7ac5682827b2e39e9a45e8f9e6efe379cddf27e00b2b0830000002574455874646174653a63726561746500323031302d30312d31395432313a33353a34372d30373a3030be1641b70000002574455874646174653a6d6f6469667900323031302d30312d31395432313a33353a34372d30373a3030cf4bf90b0000001974455874536f667477617265007777772e696e6b73636170652e6f72679bee3c1a0000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (32, 'GPLv2', 'GNU General Public License, version 2.0', 
'\x89504e470d0a1a0a0000000d4948445200000020000000200804000000d973b27f00000002624b47440000aa8d2332000000097048597300000dd700000dd70142289b78000000097670416700000020000000200087fa9c9d000003fc4944415448c795955b6c936518c77f5ffbf1ad5f5b4ab7216c7127d846066c333bb1c112173c5c6036b30b41259198188877265c301363d44412d42c9a70a131124d8c12e38d0968c8300b07290293c8d898e28cb08aa056a05bd76e6bfbfdbdd8a97420f37ff5bec9f3ffbfcff3bccfc160216cca69a0856a4af001e38c30c0f7fcc0af24b28d8dacbb452ded3c468527b8d40a1836304154b1a9c46d86f996c3f433756f81529ee6396b4d99d540336b58891788f32797394b1f57a6a62ef3295f70f52e7ee3a29ecf8d68b976eb84224a2b1369457442bb552e23ca41ea712da4b7d16ba7dad5a398ee85987ad42e3b452f6dd912f5f4fa523b352447ff054743da295f8a5eeaef8cfda09ddaa5b01683b076c94e7190d2597a0e5dae68bb86164597a42175c815e56572a605363050a1a392d29ad0c47d8298c651558801368089978e9cca4e9af9895e463058c5a3acc6e11c3f22c060394d94f2373d8c524f236e36d2c9fecac90e06a09650954eeaa41e91572eb9e4d3365dd6a45e915fb63cf22857cfe8aace6b9d7c7a559392a4ef5425423ce4a2c9286f229ffd1c23cd2a0a18e710874892244e9a62f2b8cd371cc76182384904c05a9a30ca6974b1d10eb630cc310c9ee233dea3854a441a8002dee16d0a89f133498c8cc20dd28c1d64a3494dc05acd056e91cfb3345343094b28c102600945801fcdbd3c5f796b0858f15a93e220b9fc41927ccab8ce216240218f03708b8f8810268722ccace22d6019378a4dfc5e3c248125588cf016bf63d04c13060637f91007833a5a21cb071b2ff84d0c03132f0609e2f8584b8a3053081036d5047990add47261a67d67f3307d3289c7fd694ab08870894edee563bae75e59c1ebd4e263d95cef4c328a05e490200171936bb757dca48142aef03e13388448ce0998145294313c1c8e700d03832dac6414ae990c8ed6fc626e650b0738493f0e637858871f0787344e46d40e70894b002ca792688a4193d38927cee46de32552f410c5621d1dbc401eb994513cf39dd3d3ae684ece8d451f89514e1b34f2416dc3011af98b8bdcc0a2942a0238fc46180feb593a431a63706ea6bab9ce5e2e9ee745584ab79d7c4d894537b3244dea0dd949baa7d5db18ae56e87f0984542d86699bf6c7cb9beef1edbab268fa556d977b9cbd7867f353c5e180b3479145d123daa380c3d754656e8736ce059d2e8ddc973ea22e051dced176e74e31e9a02fe0ecd0d9998171f7d49dd50e051cfa7872416f6
1b29923ee789df6a9ff2ebb21a67eed539dcc0447d83c4fcf74c34515cfb3cd5fbcd6b58966ca79801c609208c39c21c490130bf3259f30345fa0d9cbd5cf263a79d85decf7e7b9f2f10171fee1a633164b8739c15784886512b2050072a9a19556d653800798e006839ce21417b9956dfc2f6ec53a03c4e284690000002574455874646174653a63726561746500323031302d30312d31395432313a33353a34372d30373a3030be1641b70000002574455874646174653a6d6f6469667900323031302d30312d31395432313a33353a34372d30373a3030cf4bf90b0000001974455874536f667477617265007777772e696e6b73636170652e6f72679bee3c1a0000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (33, 'BSD', 'BSD', '\x89504e470d0a1a0a0000000d4948445200000020000000200806000000737a7af400000006624b4744000000000000f943bb7f000000097048597300000dd700000dd70142289b78000000097670416700000020000000200087fa9c9d000008c34944415458c3ad976b6c54651ac77f67ce9999ceadf442e96d5aa8564a0b95562a140a5b16f012244182bb62a29b266b4cc06c4282f1839744dd88624cc028f10388ae06814d40651502ac5c1a2d852d5b7a675aea3230b45c5a3a6da7a773aeef7ee8880501d1f84fce87c9799ff7ffbccf7f9ee7fc5f89bb4312300528036603d3817c2090783f04848136e024703af15bfba58da55f78ef4a902d0596005325494a73b95c2eb7dbed703a9d00188681a669b6aeeb9a10e21ad0091c02f601ed80f15b12c803fe043c0314a7a5a5b90b0b0b99397326454545e4e6e6e2f7fb0188c56244221142a1104d4d4d747575118d46e309f24f81dd40e42eab8d049403db81684a4a8a78ecb1c7c4b66ddb447b7bbb181c1c149665899b619aa68846a3a2b5b5556cd9b2453cfae8a32239395900d7807f00f7df45c57100d5c0b7b22c9ba5a5a5e2830f3e10172e5c10b66d8bbb856559221c0e8b4d9b3689929212e170380ce0205095e0b82d2a8023b22c9b8b172f16070f1e149aa6dd35f1cd88c7e362dfbe7da2baba5a381c0e339144d9edc80b807fcab26c2e59b244343434fcaa53df0eb66d8bfafa7ab170e14221cbb2999036ff66f224e06560a8bcbc5c1c3e7cf877211f2fc9810307446969a900a2c08b807b7c02f3808ef4f474b175eb56a11b86d08787457f73b3e8adad15d1ce4e61e9fa0d9b6ad1a818ecea12b1484458e364b24d538c5ebd2a62918888f7f509db34c7d66b9af8f0c30f456a6aaa005a80070164c00f3ce77038962e5bb64c5ebb762d6e4da3f5fdf7697ce
b2dce7efe39bdb5b5b8fc7e92efbb0f87a230d0d141d33befd0b67933170f1d428b46094c9e8ce2f5d273f428a713713d478ea00d0ce0cdcec6939a4a4e4e0e6d6d6d8442a114a00f382127dae36f99999979eb5e78818ab232ba3efb8cc6b7dfc6d675dca9a944cf9c61a0ad8d8c59b370280aa75e7f9dce4f3f451f1e66f8dc397a8f1dc39391417261218d6fbec9b92fbfc4181961b0bb9bc8a143a8910869a5a5a4e7e763db36b5b5b5b2aaaa2ee0b80cac0056cc9933c7bb7af56a92749da677dfc588c598bd7e3d25ab57234912178f1c21909f8fa569b46ddeccc4b23266bdfa2a5955554892c4c4f2723c93267166eb567cc120951b3690f7f0c318b118170e1c0020abb2125f6a2ac78f1f271c0ebb81330a50a528ca848a071f243b3b9b584707b17098896565041f7984a4f474262f5bc6d99d3b19eaee0649c25455a6ac58c1bdab56212c8bc2279f44f1f918b978115bd7f1e7e793fd873fe00c04489f3993132fbec80f7bf6107ce82172abaa78e08107f8eebbef522ccb9ae70066783c1ea5a4b818b7cb85353a8a6518b8d2d2509292003055156159385c2e1c4e274208b4fe7e2c4d439265dce9e9c889b54863c34ed83600130a0b995a53831d8fd373f4282e59a6b8a404b7dbad00331420cfebf59293939398850e2460b8bb9bf0d75f234912ddbb7661eb3aa9d3a7935a5282272383b33b76907ccf3d4c59b10267e29b70ebc12e91316b16818202fa9b9ab0464608e6e6e2f57a255555f315c0e776bbf1f97c37c4f57eff3dfd2d2d0821d0a351148f07637898d4e2620a57ada279e3464ebefc32c3e130453535f882c1dbe6e09a30015f5e1e433ffc80118be1f7fb71bbdd007e05902449424a940e21c626535a1a29d3a681c381adeb0c767612dab68d8cd9b32979fe792459a6e3a38f38bd610303eded94bff4d275c97e56048703d9ed469826c2b2aecb04480aa06a9a96acaaeaf5000164575753f1da6b285e2fb661d0bd6b17cd1b377261ff7e32e7cca174ed5a528a8b69d9b489735f7d859294c4b4679f1dbff975589a8676ed1a8acf879c9484aaaa188601a03a801e5555b974e9d20d41ce40007f5e1efefc7c92efbd9729cb97e30b06897674a00f0de10c042858b992aaf7de236bde3c22df7e4b7f53d34f951c87d8850b0c9d3d4b7241018adf4f6f4f0f8903471c40dbe8e8a8150a85304cf386409190034076bb915d2e6cc3c0d67510024992482f2b23b3aa0a5355517b7a7e8a492462c5e39cdfb78f787f3f59f3e661c932674221e2f1b809b42b409d61188b1b1a1a52ae5cb982fbc736b22c6c5d1f7b4c93aba74ea1f6f6e20b06e9deb50bcfa449e42c5a84158f136d6f4752149cc9c9489284adeb98b118b6ae73fe9b6f087dfc3169a5a
5e42e5ac4e5ab57f9efa95398a63904d429c071e07c737373cae9a62666a7a7230941dfa95334ae5f8fc3e5c254557a8f1dc3d234264c9dcad91d3b180e87c9acac0449a2f7d831d267ce24b5a40480bec6464ebef20ae6c80897ebea507c3ea6af59837fca14febd772fadadad00e780130a63beadb6b7b777fa9eddbbe569cf3c833310e0eae9d30c7474dcf09fb8efa9a728aaa9c1150810fae41322870f836d13282860fa9a35a4cd9881272b8b685717ffdbb307d9e36162793925cf3d47fed2a5f40f0cb07bf76e2e5fbe6c02478090c4984f5b046cc9c9c92978e78d3798e570303096251220290a29c5c504972cc1170c620c0fd3d7d8487f5313b6ae935e5e4ed6dcb9488ac2e5fa7a063b3b9180a48c0cd2eebf1f7f5e1e36b063c70ed6ad5bc7952b57ce027f056a7f3ca00fd800a80b162c1027ebeb85a969d71f4bd384b88541b12d4bd8b730a8c2b67fb6beaeae4e5456560a6004f83be0bdb95b4a80fd4ea7d37ee28927c49950e87773446d6d6d62f9f2e54251141bd80b14dd7260017f041a5c2e97bd72e54a71e2c40961261ccd6f816118a2aeae4e3cfef8e3c2e974da403d309f3bd87305580e34389d4e7bfefcf962e7ce9da2bfbfff5793f7f5f589eddbb78bb973e78e275f9ae0b823146021b01f50b3b3b3454d4d8dd8bb77af387ffefc1d6d7a3c1e17e170587cf1c517e2e9a79f169999993f6afe2f60016316f06765bf151c099dfe02fc5996e5c9d9d9d97269692915151514151591959585cfe7430881aaaaf4f4f4100a85686868a0a5a5854b972e99b66d9f0376019f31765f14779bc0f8ee989390a51ac8773a9d018fc7a378bd5edc6ef7d8e75ad7191919617474d44c4cb8307014f80af80fa0de8ee017ef6a094c4874492563167e3a9093489044992f02adc0f709bdcf30766dbf23fe0f708f107b7e1ffe120000002574455874646174653a63726561746500323031302d30312d31395432313a33353a34372d30373a3030be1641b70000002574455874646174653a6d6f6469667900323031302d30312d31395432313a33353a34372d30373a3030cf4bf90b0000001974455874536f667477617265007777772e696e6b73636170652e6f72679bee3c1a0000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (34, 'MIT', 'The MIT License', 
'\x89504e470d0a1a0a0000000d4948445200000020000000200804000000d973b27f00000002624b47440000aa8d2332000000097048597300000dd700000dd70142289b78000000097670416700000020000000200087fa9c9d000003c64944415448c78d954b4c547714c67ff7313338033e50288332764420bcaa54a882582498b4b18d34e96bd336eebaedaa9b2ebbeaca2e9a74d1b44935b4606a1b6da2a98dad8b1651d0d80a54c40a835625508b3c8619e672bf2e18601844f856ff7f72beef7fce3de77cd76039327896ddbc403921b2807122f47085eb4488a7071b69772fe51ce610c546b6d7eb333d4082b83b13d7236ef13367e925b1b240016ff22ea5d9be9deca284ad640293dca38f3fe8672c462fc739c5bd27e48d41152d8c6dd42bfa4abd7aac59cdc3d198baf5855ed67af188af796e59e6983470c1722af599eecad59330ab883e5599cc04e7d98fb954a09a5f2da749e715d7d310d35935c87438cfee547a98939673485d2bbc9d0a571d3a28cba185d062db3e62bc4abfa454fd34ccea27558a313ec4372750c75f9bf5a5126ba24bd28c3ed72671831ab0c9a4d92c6ce200bdccb09e301e66b9cb2826213631c41821e23c48a9d8623f0d9c2ed6117aa09ace3c7da78bdaa7b08e6850d288de5758656ad5a43e50a5bed731ed54583b54a81d0aab426dfa4639e212bb4cf6122ea79a69220c70950830c2550618601c976106984c4edc287ff3180303931acaa0901a93fdf6861af27031b099e036628861ec94465bbc4d1bdff22a3e8e7292e33492cff3581ba9b3a9586797e2034cf298a68f1803c4d8c67f2955070932432e0605ec014094e2b3a31526057ef2175621877e46e9c34b51faa8e122402839f95bf11b844c023e02c9902d6ce32e77b84d2e5b93812b23131f649a1846723344163b19a5930861b2571530000c93689c6852c04b11312e3042117e56439404444dee4779b8a0598887df48508c7715ba784014ee99f44ccff62d98cc367298248b10c62a25c4b949cca1d7a43d31d1c50806426c613b104c7e422d7b7511c35cc319a7dde412437f721d8b0c3c0428c14f21d958f8b0012f1958c9023d0b67b846370c72d96282c2a93d5eb391626a292687325ea2183fa5d492472655d4b11930f053493d4160946374386ae51418347127a83639493b993795f9dba2c9cc9f133aa15cd1cf8b73d904f88468bdaeacd10d5cb56b9f98e2e3c55e9771cee3bea19b6b12e851b36c973394a40e55235d5ef7755d96f3547242ed7a4d1e970eea975abb4d335d1eb75eadfa7745faa85a543b473f8c9d3e193607394734a8a33aa3a1347b8f29a21ff48e9e1153fcc881856e2e49c3a484f778cbda1eb42aa9a6843c028828f7e9a38b1b3c74dc41d
a38c1adc5994aff4505d84b330d843c59eb6c3f3ec40c534c3bce38112e729acee4eead2000b08132f6514739f9048029fea19bdfe9e026e3e9c1ff030b55eb0b90e9adfb0000002574455874646174653a63726561746500323031302d30312d31395432313a33353a34372d30373a3030be1641b70000002574455874646174653a6d6f6469667900323031302d30312d31395432313a33353a34372d30373a3030cf4bf90b0000001974455874536f667477617265007777772e696e6b73636170652e6f72679bee3c1a0000000049454e44ae426082', true); - INSERT INTO public.license_label (label_id, label, title, icon, is_extended) VALUES (35, 'OSI', 'The Open Source Initiative ', '\x89504e470d0a1a0a0000000d4948445200000028000000240806000000fb3c781600000424694343504943432050726f66696c65000038118555df6fdb54143e896f52a4163f205847878ac5af55535bb91b1aadc6064993a5ed4a16a5e9d82a24e43a3789a91b07dbe9b6aa4f7b813706fc0140d9030f483c210d06627bd9f6c0b44953872aaa49487be8c40f2126ed0555e1bb76622753c45cf5facb39df39e73be75edb443d5f69b59a19558896abae9dcf249593a716949e4d8ad2b3d44b03d4abe94e2d91cbcd122ec115f7ceebe11d8a08cbed91eefe4ef623bf7a8bdcd189224fc06e151d7d19f80c51ccd46bb64b14bf07fbf869b706dcf31cf0d3360402ab02977d9c1278d1c7273d4e213f098ed02aeb15ad08bc063cbcd8662fb7615f0318c893e1556e1bba226691b3ad926172cfe12f8f71b731ff0f2e9b75f4ec5d8358fb9ca5b963b80f89de2bf654be893fd7b5f41cf04bb05fafb949617f05f88ffad27c02781f51f4a9927d74dee7475f5fad14de06de057bd170a70b4dfb6a75317b1c18b1d1f525eb98c82338d7756712b3a41780ef56f8b4d863e891a8c85369e061e0c14a7daa995f9a7156e684ddcbb35a99cc02238f64bfa7cde48007803fb4adbca805cdd23a3733a216f24b576b6eaea941daae9a59510bfd32993b5e8fc8296dbb95c2941fcb0eba76a119cb164ac6d1e9267fad667a6711dad805bb9e17da909fddd2ec74061879d83fbc3a2fe6061cdb5dd45262b6a3c047e84444234e162d62d5a94a3ba4509e3294c4bd46363c2532c88485c3cb6131224fd2126cdd79398fe3c7848cb217bd2da251a53bc7af70bfc9b1583f53d901fc1f62b3ec301b6713a4b037d89bec084bc13ac10e050a726d3a152ad37d28f5f3bc4f7554163a4e50f2fc407d288851ced9ef1afacd8ffe869ab04b2bf4234fd031345bed13200713687537d23ea76b6b3fec0e3cca06bbf8ceedbe6b6b74fc71f38ddd8b6dc736b06ec
6b6c2f8d8afb12dfc6d52023b607a8a96a1caf076c20978231d3d5c01d3250deb6fe059d0da52dab1a3a5eaf981d02326c13fc83eccd2b9e1d0aafea2fea96ea85fa817d4df3b6a84193ba6247d2a7d2bfd287d277d2ffd4c8a7459ba22fd245d95be912e0539bbefbd9f25d87baf5f6113dd8a5d68d56b9f3527534eca7be417e594fcbcfcb23c1bb014b95f1e93a7e4bdf0ec09f6cd0cfd1dbd18740ab36a4db57b2df10418340f25069d06d7c654c584ab741659db9f93a65236c8c6d8f423a7765c9ce5968a783a9e8a274889ef8b4fc4c7e23302b79ebcf85ef826b0a603f5fe9313303a3ae06d2c4c25c833e29d3a715645f40a749bd0cd5d7e06df0ea249ab76d636ca1557d9afaaaf29097ccab8325dd5478715cd3415cfe5283677b8bdc28ba324be83228ee841defbbe4576dd0c6dee5b4487ffc23beb56685ba8137ded10f5bf1ada86f04e7ce633a28b07f5babde2e7a348e40691533ab0dffb1de94be2dd74b7d17880f755cf27443b1f371affae371a3b5f22ff16d165f33f590071786c1b2b13000000097048597300000b1300000b1301009a9c18000009ae494441545809bd586b6c14d715be33b3b3e3b5bd7eadbd181b5c1b080e71e40670c02950d92502d407aa9ada4514aaa4519b1f55faab9192082946156afab351aa4a11ea9f2a4de36daba0264da9a80c22d084d8450d260a8ff0f6738d5fbb5eefceeeccf4fbeeceac97e00527917ae19b7be7de73cffdceb9e79eb96bc5711cf1658b82021d8ae8025822824aa1facb2bf749855fe021497509b5a3a543018f0c09e5ab5184a2741eecf41d3f7fdc717a1d2b7fec73b569e4e7053ca5810e89e5e662d12220e4c27fc7584f8fca39f97d4b6dcb45966a91f45a4787e6f4f5d163f091b2b676cbcadd465d608b1ed49b145d2dc3263b76ca9a4e4f9b9793c38913e31f0cff0d646e4af94ec5e7f4496f2f75c9ac1796222dc9f540fe25c7467b79fd8ea603256b82fb8b9b8241bdc210aa5f25e3ac2adb11b6690973322512576627e29762af8df45dff3588ce2adddd9ad3dbbbe42d5f9207f3c95534847684b6d71eaedc58b3d257ee27215bee33482d1410055fa182b12354732229a63f8c0e8ebd71e3c939676e40e95640726971b93482aed5a1967077a863f99b559bc242f1a9a695b27c20471a8b165076609c0defdab669ebb74f8dce8ebd33b43b36327542e9ecc476674365d1c96ee77d4fb1e2c64d5959d9b6dabd8daf576d0639454959c98c819a5e5ad0ef39d1ed42450f6a56d2d234434b556fad2d83df2298b70ddb7dc1d3bda0e0eed63d0942114f5e067569e39ee6572b37857d60675aa6652824464292144f33daf0965cc2f6bc9a654a5978db500dcd0c3d16ae494fa57e03b95d3c30d0a
d628dec3c39f9ce0723a570e9ea9263cbdaeb7f52fe48a8155b65d9694bcf9193a3c8379a6aa97ecdd6fcaa2a616856962c5893232b781bdbec53033e51de5ab533f46038abbca3e39e1c0a7ad0f59e853ab0faa987f617d5160bc7b26daca72d58849535d58607b4d45042a4a2f3297ac4a82dd68d9a22f082472d9bc73bfb1ff18ab9667163d01f7ca4f249b4238c43ce29e4c5820485ebbd606570a33f1c58af68d826d3c1138b713bf92039d352a7ce44a33303d15fce9c1d3be1085dab6a0f7fabbcadfaf98af5d52542556d6183a44357222433b6c69454b4bc780b88ad05b18bb94f24d57ea6142498f5bf10a55fad6c2b0a0738cd42a069f25030e0106fd83575fabf9373970f0f3e81854ee6e93e5bac145fd47ea1bd59d65a85230c59261eda062f3262fd5545e535adb5ad9873518061a1b2e8fec33225d21b9181abe10ba1216ea03417c8e4a7fa14c74a6498885f27397e77792adbdada742e967012bd89ebf1bf3ba62d655db7b3825d8aed2bd5855e6d3452b6303d5a759fa2f89420b737bbadae30768a092e339711e9447a90bdb1e198c253d9dfdf9f6e7b264bd28aa73f468a9107449ef285b51c555785aa6b4176455a5aa88e317057b92f41613929e78eaf0474d00b7029729bd0746d25b5ce18335217170ad60565942a86af9ef1060fd36d0b85f65a0ece8f95f23a2123e778ef5ebd2841297cb047aacc2432b7ec942d905a724bb0c5057c415d041a4af780d38a4baf5c4af11306c54adf4bf2646e08d405bead16e1e263c913e2ad09c242b5126961cd646eb1b3ebfcf99cee9c90db5894a01c7327256f24ce9a9349462092b4b7d168d8b606b7d838a92b1bbbd6feb5b67d45bbfcbec2b3cb36d7ed6cdab7ee4f65ada12066f070611d5a05cd3c5c082d5c24cca933e3e7b85604ff0a95c204dd495317a3675263f3d7a840d5142ee6ea42e24d5baa5e699835dbeb1ff52f2b7a550e28a238f095e0e19aceba07b4129fc9b422c9491fc1751a8206de37a3f3ff498b6cfcba377057ef9d55618211014b116a8e3331f7e9ec9ff179e217231b29f91b623b9a359f1156c27a9baa21308793fb2eee847c4100a293f2a8691b74a8c9d184885d988d4036dd8d8b084719bba8ef2a050962b2233ab35f8de163d77f3bf3d164148745477a497b5e445ce2880a6deeca6c7cf8d8b55e4ffbe491d137e6aec5b89d3eac8ceb18c9c17b7ecdb4e1d1d9739383e3ef0fbd42f94824bbbd723d4f415e5d902065e4c79cd722c7b936f541f4f9d827d3f4808e85335c1029c8611a498e248ee3fd637a9cf3e2227e3a3592e817199903f929642e4c63c83f3d3021a64e450f34ffb83910ee6caac59805d0cf8b967b1294338e1f97b7dfdbe7467f7ffbe4d8216c37f21796d
355a6082d85ed8a5f88fd45cab66df4c91bb3e3a4e6afc68ff0460dca0a3d47c3660727c5e4e9f167a6ae4e1c890f259fd574d15cf778d3aeba9dabbe5ebf63d56e6f9bbd9a3aef4b90d66182941b3d79e3c0f8d1a117a6faa342641c038b2a891bf15b20ffae24b86ac016d188f4e2c87b37df4a5c8fc561886627337e5c565313476f3d1d1d187e8db2f83cdf542d27a6aace06e4843d8e626faadedd5c2af5f4c8a895cd82df6239ea3ec091bf43e48d63f4f4cd974b2b2a3e34bf933a54dc50bad91c9dff07c6c7948318c735deb31e7d838dddcdefcd7e34b96bee72ec9f43912b2f9a8e39e0e9f5f932a72d474f238e7b612c6e14191fb282348ebf7b3c3919bc50c6777a8984595390b54cbc79e3bae811ee511646685df847b59d0d2d729c3f45b373384fcad46d6dd85ab5baea071cff8c8cfca9e1f5e7d7ee9a72be9c231faec27cc1fc362779ef68dfa5dc23e4c92c567b862d36e6f5e5ebf674a2ce79a404ed2d003fe0f4e0726055dee4b5787f98ef28654699b1a6a4b184a7307f175a30b6ce9531f40a7d7d757335f5d1ab86dbcfbb1b77ca00ca008ef3da1570c71f443becb66522c64953cad1c9df09f5c005803794fd00929938059c019e06c8ee04d00e5096e32f43d9fbd0f153b4b9a8095c06360235c03830035c825c1fe47e86f66d6083dbcf8341c2978029600dc0bf52bc05f9b3b48ce53180823b002eb01720913dc07ae057c03c700bd80790dcb3c0bf801f022c0f03f4c83b0089166181e75053df37012fd7d1630f0057011a1e73e5f8257a02f804603c7f0390ccbd3a0141a694043a483c8ad769bcf3cf1cb4e853e018f06f80060c03d78087009643000d7d0a6800fa01167a8f84653e75db49d4f432c9d3709609806dd67f0466816cc08304e3ed77400ae06492f92e701db801d0aa4e6004380774002f00edc03618f273e8f83eda9c4fef309ee9856980db3c003c0a44811500f58e02278117818b00c9519673e8eda3d07b0575ee90ac449bd6739bf6017f003601a5104425760124510990841f0801abdd717aad0b206154d28b7b50cb83867a1df035601940877807612dda7b01ae4f9ddf03b6033af5c89401eb594bad18e0a1617c35a0eb39f79d79692179b233af40fe0b8d2f611e7f5cb849341b77f2ef7a58bb09687539331e1580278d605be645b74d7268e6123bb796329c47796f9cfd04fba5cebc79520efdde3ab9bc2b3d88815c8155396fe6b77302ffe7c6ff00610e5ffd6a8201470000000049454e44ae426082', true); - - - -- - -- Data for Name: license_label_extended_mapping; Type: TABLE DATA; Schema: public; Owner: dspace - -- - - INSERT INTO 
public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991138, 1, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991139, 1, 31); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991140, 2, 32); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991141, 2, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991142, 3, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991143, 3, 34); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991144, 4, 35); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991145, 4, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991146, 5, 35); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991147, 5, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991148, 6, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991149, 6, 25); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991150, 6, 27); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991151, 6, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991152, 6, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991153, 7, 33); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991154, 7, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991155, 7, 23); - INSERT INTO 
public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991156, 8, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991157, 8, 33); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991158, 8, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991159, 9, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991160, 9, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991161, 9, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991162, 9, 25); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991163, 10, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991164, 10, 25); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991165, 10, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991166, 10, 24); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991167, 10, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991168, 11, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991169, 11, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991170, 11, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991171, 11, 27); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991172, 12, 24); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991173, 12, 23); - 
INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991174, 12, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991175, 12, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991176, 13, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991177, 13, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991178, 13, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991179, 14, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991180, 14, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991181, 14, 30); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991204, 22, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991206, 24, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991207, 25, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991208, 26, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991209, 27, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991210, 28, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991211, 29, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991212, 30, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991213, 31, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES 
(991214, 32, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991215, 33, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991216, 33, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991217, 34, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991218, 35, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991219, 36, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991220, 37, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991221, 37, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991222, 37, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991223, 38, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991224, 38, 24); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991225, 38, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991226, 38, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991227, 39, 27); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991228, 39, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991229, 39, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991230, 39, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991231, 40, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, 
label_id) VALUES (991232, 40, 25); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991233, 40, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991234, 40, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991235, 41, 25); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991236, 41, 29); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991237, 41, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991238, 41, 24); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991239, 41, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991240, 42, 20); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991241, 42, 27); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991242, 42, 25); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991243, 42, 23); - INSERT INTO public.license_label_extended_mapping (mapping_id, license_id, label_id) VALUES (991244, 42, 29); - - - -- - -- Name: license_definition_license_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dspace - -- - - PERFORM pg_catalog.setval('public.license_definition_license_id_seq', 85, true); - - - -- - -- Name: license_label_extended_mapping_mapping_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dspace - -- - - PERFORM pg_catalog.setval('public.license_label_extended_mapping_mapping_id_seq', 991244, true); - - - -- - -- Name: license_label_label_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dspace - -- - - PERFORM pg_catalog.setval('public.license_label_label_id_seq', 35, true); - - END IF; -- End of 
license_label_extended_mapping check - END IF; -- End of license_label check - END IF; -- End of license_definition check -END $$; \ No newline at end of file From d1ac5727a92dbb88a948e95cb09d22a3948e4fc4 Mon Sep 17 00:00:00 2001 From: milanmajchrak Date: Tue, 4 Feb 2025 15:29:46 +0100 Subject: [PATCH 45/45] Removed unuseful comment --- .../dspace/app/rest/security/StatelessAuthenticationFilter.java | 1 - 1 file changed, 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/StatelessAuthenticationFilter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/StatelessAuthenticationFilter.java index 6d2139b26849..32169fda4a8c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/StatelessAuthenticationFilter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/StatelessAuthenticationFilter.java @@ -105,7 +105,6 @@ protected void doFilterInternal(HttpServletRequest req, } finally { // Complete the context to avoid transactions getting stuck in the connection pool in the // `idle in transaction` state. - // TODO add the issue url Context context = (Context) req.getAttribute(ContextUtil.DSPACE_CONTEXT); // Ensure the context is cleared after the request is done if (context != null && context.isValid()) {