Michele Artini 2022-11-17 11:19:07 +01:00
parent d81a72b896
commit 582c198cc0
9 changed files with 188 additions and 58 deletions

View File

@@ -58,13 +58,8 @@ public class MainController {
@GetMapping("/simpleResources")
public void simpleResources(@RequestParam final String type, final ModelMap map) {
System.out.println("TYPE: " + type);
final Optional<SimpleResourceType> restype = simpleResourceTypeRepository.findById(type);
System.out.println("OP TYPE: " + restype);
System.out.println("OP TYPE: " + restype.isPresent());
if (restype.isPresent()) {
map.addAttribute("type", restype.get());
} else {

View File

@@ -1,29 +1,48 @@
package eu.dnetlib.is.resources;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import eu.dnetlib.is.resource.model.SimpleResource;
import eu.dnetlib.is.resource.repository.SimpleResourceRepository;
import eu.dnetlib.is.util.OldProfilesImporter;
@RestController
@RequestMapping("/api/resources")
public class ResourcesRestController {
@Autowired
private OldProfilesImporter oldProfilesImporter;
@Autowired
private SimpleResourceRepository simpleResourceRepository;
@GetMapping("/")
public List<SimpleResource> listVocs() {
return simpleResourceRepository.findAll()
@GetMapping("/{type}")
public List<SimpleResource> listResources(@PathVariable final String type) {
return simpleResourceRepository.findByType(type)
.stream()
.sorted((r1, r2) -> StringUtils.compareIgnoreCase(r1.getName(), r2.getName()))
.collect(Collectors.toList());
}
@PostMapping(value = "/import", consumes = "text/plain")
public SimpleResource importFromOldProfile(final HttpServletRequest request) throws Exception {
final String xml = IOUtils.toString(request.getInputStream(), StandardCharsets.UTF_8);
return oldProfilesImporter.importSimpleResource(xml);
}
}
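
A minimal client sketch (not part of this commit) for exercising the two endpoints above with java.net.http.HttpClient; the base URL and the local profile file are assumptions.

// Hypothetical client, not in the commit: calls GET /api/resources/{type} and
// POST /api/resources/import (text/plain). Base URL and input file are assumed.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

public class ResourcesApiClientExample {

	public static void main(final String[] args) throws Exception {
		final HttpClient client = HttpClient.newHttpClient();
		final String base = "http://localhost:8080/api/resources"; // assumed base URL

		// GET /{type}: list the resources of one type (e.g. the cleaning rules)
		final HttpRequest list = HttpRequest.newBuilder()
			.uri(URI.create(base + "/cleaning_rule"))
			.GET()
			.build();
		System.out.println(client.send(list, HttpResponse.BodyHandlers.ofString()).body());

		// POST /import: send an old profile XML as the raw text/plain body
		final String xml = Files.readString(Path.of("old-profile.xml"), StandardCharsets.UTF_8); // assumed file
		final HttpRequest imp = HttpRequest.newBuilder()
			.uri(URI.create(base + "/import"))
			.header("Content-Type", "text/plain")
			.POST(HttpRequest.BodyPublishers.ofString(xml))
			.build();
		System.out.println(client.send(imp, HttpResponse.BodyHandlers.ofString()).body());
	}
}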

View File

@@ -0,0 +1,114 @@
package eu.dnetlib.is.util;
import java.io.StringReader;
import java.util.Date;
import javax.transaction.Transactional;
import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import eu.dnetlib.is.resource.model.SimpleResource;
import eu.dnetlib.is.resource.repository.SimpleResourceRepository;
import eu.dnetlib.is.vocabulary.model.Synonym;
import eu.dnetlib.is.vocabulary.model.Vocabulary;
import eu.dnetlib.is.vocabulary.model.VocabularyTerm;
import eu.dnetlib.is.vocabulary.repository.VocabularyRepository;
import eu.dnetlib.is.vocabulary.repository.VocabularyTermRepository;
@Component
public class OldProfilesImporter {
@Autowired
private SimpleResourceRepository simpleResourceRepository;
@Autowired
private VocabularyRepository vocabularyRepository;
@Autowired
private VocabularyTermRepository vocabularyTermRepository;
@Transactional
public SimpleResource importSimpleResource(final String xml) throws Exception {
final SAXReader reader = new SAXReader();
final Document doc = reader.read(new StringReader(xml));
final String id = StringUtils.substringBefore(doc.valueOf("//RESOURCE_IDENTIFIER/@value"), "_");
final Date now = new Date();
final SimpleResource res = new SimpleResource();
res.setId(id);
res.setCreationDate(now);
res.setModificationDate(now);
res.setContentType("xml");
String resContent;
switch (doc.valueOf("//RESOURCE_TYPE/@value")) {
case "CleanerDSResourceType":
res.setType("cleaning_rule");
res.setName(doc.valueOf("//CLEANER_NAME"));
res.setDescription(doc.valueOf("//CLEANER_DESCRIPTION"));
resContent = doc.selectSingleNode("//CLEANER_RULES").asXML();
break;
case "TransformationRuleDSResourceType":
res.setType("transformation_rule");
res.setName(doc.valueOf("//SCRIPT/TITLE"));
res.setDescription("");
resContent = doc.selectSingleNode("//SCRIPT/CODE").asXML();
break;
case "HadoopJobConfigurationDSResourceType":
res.setType("hadoop_job_configuration");
res.setName(doc.valueOf("//HADOOP_JOB/@name"));
res.setDescription(doc.valueOf("//HADOOP_JOB/DESCRIPTION"));
resContent = doc.selectSingleNode("//HADOOP_JOB").asXML();
break;
default:
throw new Exception("Invalid resource type: " + doc.valueOf("//RESOURCE_TYPE/@value"));
}
simpleResourceRepository.save(res);
simpleResourceRepository.setContentById(id, resContent);
return res;
}
@Transactional
public Vocabulary importVocabulary(final String xml) throws Exception {
final SAXReader reader = new SAXReader();
final Document doc = reader.read(new StringReader(xml));
final Vocabulary voc = new Vocabulary();
final String vocId = doc.valueOf("//VOCABULARY_NAME/@code");
final String vocName = doc.valueOf("//VOCABULARY_NAME");
final String vocDesc = doc.valueOf("//VOCABULARY_DESCRIPTION");
voc.setId(vocId);
voc.setName(vocName);
voc.setDescription(vocDesc);
vocabularyRepository.save(voc);
for (final Node n : doc.selectNodes("//TERM")) {
final VocabularyTerm term = new VocabularyTerm();
term.setVocabulary(vocId);
term.setCode(n.valueOf("@code"));
term.setName(n.valueOf("@english_name"));
term.setEncoding(n.valueOf("@encoding"));
term.setSynonyms(n.selectNodes(".//SYNONYM")
.stream()
.map(ns -> new Synonym(ns.valueOf("@term"), ns.valueOf("@encoding")))
.sorted()
.distinct()
.toArray(Synonym[]::new));
vocabularyTermRepository.save(term);
}
return voc;
}
}
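
A hedged input sketch for importSimpleResource(...): a minimal old-style profile containing only the elements its XPaths read (RESOURCE_IDENTIFIER, RESOURCE_TYPE, CLEANER_NAME, CLEANER_DESCRIPTION, CLEANER_RULES). The element nesting and the body of CLEANER_RULES are illustrative assumptions, since the XPaths use "//".

package eu.dnetlib.is.util;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import eu.dnetlib.is.resource.model.SimpleResource;

// Hypothetical helper, not part of the commit: builds a minimal profile and
// passes it to the importer. Nesting and the CLEANER_RULES body are assumptions.
@Component
public class OldProfilesImporterExample {

	@Autowired
	private OldProfilesImporter oldProfilesImporter;

	public SimpleResource importExample() throws Exception {
		final String xml = "<RESOURCE_PROFILE>"
			+ "<HEADER>"
			+ "<RESOURCE_IDENTIFIER value=\"abc-123_Q2xlYW5lckRT\"/>"
			+ "<RESOURCE_TYPE value=\"CleanerDSResourceType\"/>"
			+ "</HEADER>"
			+ "<BODY>"
			+ "<CLEANER_NAME>Example cleaning rule</CLEANER_NAME>"
			+ "<CLEANER_DESCRIPTION>Example description</CLEANER_DESCRIPTION>"
			+ "<CLEANER_RULES><RULE/></CLEANER_RULES>"
			+ "</BODY>"
			+ "</RESOURCE_PROFILE>";

		// importSimpleResource takes the id from RESOURCE_IDENTIFIER/@value before
		// the first "_" ("abc-123"), sets type "cleaning_rule", and stores the
		// <CLEANER_RULES> subtree as the resource content.
		return oldProfilesImporter.importSimpleResource(xml);
	}
}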

View File

@@ -1,7 +1,6 @@
package eu.dnetlib.is.vocabulary;
import java.io.IOException;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.stream.Collectors;
@@ -12,10 +11,6 @@ import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
@@ -26,7 +21,7 @@ import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import eu.dnetlib.common.controller.AbstractDnetController;
import eu.dnetlib.is.vocabulary.model.Synonym;
import eu.dnetlib.is.util.OldProfilesImporter;
import eu.dnetlib.is.vocabulary.model.Vocabulary;
import eu.dnetlib.is.vocabulary.model.VocabularyTerm;
import eu.dnetlib.is.vocabulary.model.VocabularyTermPK;
@@ -43,6 +38,9 @@ public class VocabularyRestController extends AbstractDnetController {
@Autowired
private VocabularyTermRepository vocabularyTermRepository;
@Autowired
private OldProfilesImporter oldProfilesImporter;
private static final Log log = LogFactory.getLog(VocabularyRestController.class);
@GetMapping("/")
@@ -72,42 +70,10 @@ public class VocabularyRestController extends AbstractDnetController {
return listVocs();
}
@PostMapping(value = "/load", consumes = "text/plain")
public Vocabulary loadFromOldProfile(final HttpServletRequest request) throws DocumentException, IOException {
@PostMapping(value = "/import", consumes = "text/plain")
public Vocabulary importFromOldProfile(final HttpServletRequest request) throws Exception {
final String xml = IOUtils.toString(request.getInputStream(), StandardCharsets.UTF_8);
final SAXReader reader = new SAXReader();
final Document doc = reader.read(new StringReader(xml));
final Vocabulary voc = new Vocabulary();
final String vocId = doc.valueOf("//VOCABULARY_NAME/@code");
final String vocName = doc.valueOf("//VOCABULARY_NAME");
final String vocDesc = doc.valueOf("//VOCABULARY_DESCRIPTION");
voc.setId(vocId);
voc.setName(vocName);
voc.setDescription(vocDesc);
vocabularyRepository.save(voc);
for (final Node n : doc.selectNodes("//TERM")) {
final VocabularyTerm term = new VocabularyTerm();
term.setVocabulary(vocId);
term.setCode(n.valueOf("@code"));
term.setName(n.valueOf("@english_name"));
term.setEncoding(n.valueOf("@encoding"));
term.setSynonyms(n.selectNodes(".//SYNONYM")
.stream()
.map(ns -> new Synonym(ns.valueOf("@term"), ns.valueOf("@encoding")))
.sorted()
.distinct()
.toArray(Synonym[]::new));
vocabularyTermRepository.save(term);
}
return voc;
return oldProfilesImporter.importVocabulary(xml);
}
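
A hedged input sketch for the delegated importVocabulary(...): a minimal vocabulary profile containing only the elements its XPaths read (VOCABULARY_NAME with @code, VOCABULARY_DESCRIPTION, TERM and SYNONYM attributes). The nesting is an assumption, and the fragment presumes the oldProfilesImporter field autowired in this controller.

// Hypothetical fragment, not part of the commit; assumes the autowired
// oldProfilesImporter above. Element nesting is illustrative only.
public Vocabulary importVocabularyExample() throws Exception {
	final String xml = "<RESOURCE_PROFILE>"
		+ "<VOCABULARY_NAME code=\"dnet:languages\">Languages</VOCABULARY_NAME>"
		+ "<VOCABULARY_DESCRIPTION>Example vocabulary</VOCABULARY_DESCRIPTION>"
		+ "<TERMS>"
		+ "<TERM code=\"en\" english_name=\"English\" encoding=\"ISO\">"
		+ "<SYNONYMS><SYNONYM term=\"eng\" encoding=\"ISO\"/></SYNONYMS>"
		+ "</TERM>"
		+ "</TERMS>"
		+ "</RESOURCE_PROFILE>";

	// importVocabulary saves a Vocabulary with id "dnet:languages" and one
	// VocabularyTerm "en" carrying the synonym "eng".
	return oldProfilesImporter.importVocabulary(xml);
}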
@GetMapping("/{vocabulary}/terms")

View File

@@ -4,9 +4,10 @@ app.controller('resourcesController', function($scope, $http) {
$scope.resources = [];
$scope.tmpRes = {};
$scope.mode = '';
$scope.type = typeId();
$scope.reload = function() {
$http.get('./api/resources/?' + $.now()).then(function successCallback(res) {
$http.get('./api/resources/' + $scope.type + '?' + $.now()).then(function successCallback(res) {
$scope.resources = res.data;
}, function errorCallback(res) {
alert('ERROR: ' + res.data.message);

View File

@@ -3,6 +3,12 @@
<head th:replace="fragments/mainParts.html :: htmlHeader('Resources: ' + ${type.name})"></head>
<script th:inline="javascript">
/*<![CDATA[*/
function typeId() { return /*[[${type.id}]]*/ ''; }
/*]]>*/
</script>
<body ng-app="resourcesApp" ng-controller="resourcesController">
<nav th:replace="fragments/mainParts.html :: mainMenu('Resources: ' + ${type.name})"></nav>

View File

@@ -23,6 +23,12 @@ public class SimpleResource implements Serializable {
@Column(name = "name")
private String name;
@Column(name = "type")
private String type;
@Column(name = "content_type")
private String contentType;
@Column(name = "description")
private String description;
@@ -50,6 +56,22 @@
this.name = name;
}
public String getType() {
return type;
}
public void setType(final String type) {
this.type = type;
}
public String getContentType() {
return contentType;
}
public void setContentType(final String contentType) {
this.contentType = contentType;
}
public String getDescription() {
return description;
}

View File

@@ -1,5 +1,7 @@
package eu.dnetlib.is.resource.repository;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
@@ -8,10 +10,12 @@ import eu.dnetlib.is.resource.model.SimpleResource;
public interface SimpleResourceRepository extends JpaRepository<SimpleResource, String> {
@Query(value = "select content from resources where id = :id", nativeQuery = true)
@Query(value = "select content from resources where id = ?1", nativeQuery = true)
String getContentById(String id);
@Modifying
@Query(value = "update resources set content = :content where id = :id", nativeQuery = true)
String setContentById(String id, String content);
@Query(value = "update resources set content = ?2 where id = ?1", nativeQuery = true)
void setContentById(String id, String content);
List<SimpleResource> findByType(String type);
}
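
A small service sketch (an assumption, not part of the commit; the package name is hypothetical) showing typical use of the repository above: the @Modifying native update needs an active transaction, so the write is wrapped in @Transactional, while the native select can be called directly.

package eu.dnetlib.is.resource;

import javax.transaction.Transactional;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import eu.dnetlib.is.resource.repository.SimpleResourceRepository;

// Hypothetical service, not part of the commit: wraps the content accessors.
@Service
public class SimpleResourceContentService {

	@Autowired
	private SimpleResourceRepository simpleResourceRepository;

	@Transactional
	public void replaceContent(final String id, final String content) {
		// the @Modifying native update runs inside this transaction
		simpleResourceRepository.setContentById(id, content);
	}

	public String readContent(final String id) {
		return simpleResourceRepository.getContentById(id);
	}
}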

View File

@@ -81,14 +81,17 @@ CREATE TABLE wf_history (
-- Other Resources
CREATE TABLE resource_types(id text PRIMARY KEY, name text);
INSERT INTO resource_types(id, name) VALUES ('transformation_rule', 'Transformation Rules'), ('cleaning_rule', 'Cleaning Rules');
INSERT INTO resource_types(id, name) VALUES
('transformation_rule', 'Transformation Rules'),
('cleaning_rule', 'Cleaning Rules'),
('hadoop_job_configuration', 'Hadoop Job Configurations');
CREATE TABLE resources (
id text PRIMARY KEY,
name text NOT NULL,
description text,
content_type text NOT NULL,
content text NOT NULL,
content_type text NOT NULL DEFAULT 'xml',
content text NOT NULL DEFAULT '',
type text NOT NULL REFERENCES resource_types(id),
creation_date timestamp NOT NULL DEFAULT now(),
modification_date timestamp NOT NULL DEFAULT now()