forked from D-Net/dnet-hadoop

commit a0bdbacdae (parent d3fd05e3c5)
switched automatic code formatting plugin to net.revelc.code.formatter:formatter-maven-plugin
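The pom.xml change that performs the plugin switch is not visible in the hunks below. As a minimal sketch, wiring net.revelc.code.formatter:formatter-maven-plugin into a Maven build typically looks like this; the version number and the formatter configuration file path are illustrative assumptions, not values taken from this commit:

    <!-- sketch only: plugin version and configFile path are assumed, not from this commit -->
    <plugin>
        <groupId>net.revelc.code.formatter</groupId>
        <artifactId>formatter-maven-plugin</artifactId>
        <version>2.11.0</version>
        <configuration>
            <!-- Eclipse-style formatter settings that drive the reformatting seen below -->
            <configFile>${project.basedir}/eclipse/formatter.xml</configFile>
        </configuration>
        <executions>
            <execution>
                <goals>
                    <!-- the plugin's format goal rewrites sources during the build -->
                    <goal>format</goal>
                </goals>
            </execution>
        </executions>
    </plugin>

The diffs that follow are the effect of running that formatter over the code base: single-line array initializers are expanded, empty method bodies are opened onto their own line, chained calls break before the dot, single-statement if bodies move to their own line, and Javadoc is re-flowed to a wider line length.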
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.maven.plugin.properties;
 
 import java.io.File;

@@ -20,18 +21,21 @@ public class GenerateOoziePropertiesMojo extends AbstractMojo {
 public static final String PROPERTY_NAME_WF_SOURCE_DIR = "workflow.source.dir";
 public static final String PROPERTY_NAME_SANDBOX_NAME = "sandboxName";
 
-private final String[] limiters = {"dhp", "dnetlib", "eu"};
+private final String[] limiters = {
+"dhp", "dnetlib", "eu"
+};
 
 @Override
 public void execute() throws MojoExecutionException, MojoFailureException {
 if (System.getProperties().containsKey(PROPERTY_NAME_WF_SOURCE_DIR)
 && !System.getProperties().containsKey(PROPERTY_NAME_SANDBOX_NAME)) {
-String generatedSandboxName =
-generateSandboxName(System.getProperties().getProperty(PROPERTY_NAME_WF_SOURCE_DIR));
+String generatedSandboxName = generateSandboxName(
+System.getProperties().getProperty(PROPERTY_NAME_WF_SOURCE_DIR));
 if (generatedSandboxName != null) {
 System.getProperties().setProperty(PROPERTY_NAME_SANDBOX_NAME, generatedSandboxName);
 } else {
-System.out.println(
+System.out
+.println(
 "unable to generate sandbox name from path: "
 + System.getProperties().getProperty(PROPERTY_NAME_WF_SOURCE_DIR));
 }

@@ -9,6 +9,7 @@
 * express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */
+
 package eu.dnetlib.maven.plugin.properties;
 
 import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

@@ -70,33 +71,31 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
 protected File outputFile;
 
 /**
-* If true, the plugin will silently ignore any non-existent properties files, and the build will
-* continue
+* If true, the plugin will silently ignore any non-existent properties files, and the build will continue
 *
 * @parameter property="properties.quiet" default-value="true"
 */
 private boolean quiet;
 
 /**
-* Comma separated list of characters to escape when writing property values. cr=carriage return,
-* lf=linefeed, tab=tab. Any other values are taken literally.
+* Comma separated list of characters to escape when writing property values. cr=carriage return, lf=linefeed,
+* tab=tab. Any other values are taken literally.
 *
 * @parameter default-value="cr,lf,tab" property="properties.escapeChars"
 */
 private String escapeChars;
 
 /**
-* If true, the plugin will include system properties when writing the properties file. System
-* properties override both environment variables and project properties.
+* If true, the plugin will include system properties when writing the properties file. System properties override
+* both environment variables and project properties.
 *
 * @parameter default-value="false" property="properties.includeSystemProperties"
 */
 private boolean includeSystemProperties;
 
 /**
-* If true, the plugin will include environment variables when writing the properties file.
-* Environment variables are prefixed with "env". Environment variables override project
-* properties.
+* If true, the plugin will include environment variables when writing the properties file. Environment variables
+* are prefixed with "env". Environment variables override project properties.
 *
 * @parameter default-value="false" property="properties.includeEnvironmentVariables"
 */

@@ -110,8 +109,8 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
 private String exclude;
 
 /**
-* Comma separated set of properties to write to the properties file. If provided, only the
-* properties matching those supplied here will be written to the properties file.
+* Comma separated set of properties to write to the properties file. If provided, only the properties matching
+* those supplied here will be written to the properties file.
 *
 * @parameter property="properties.include"
 */

@@ -122,7 +121,9 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
 * @see org.apache.maven.plugin.AbstractMojo#execute()
 */
 @Override
-@SuppressFBWarnings({"NP_UNWRITTEN_FIELD", "UWF_UNWRITTEN_FIELD"})
+@SuppressFBWarnings({
+"NP_UNWRITTEN_FIELD", "UWF_UNWRITTEN_FIELD"
+})
 public void execute() throws MojoExecutionException, MojoFailureException {
 Properties properties = new Properties();
 // Add project properties

@@ -437,8 +438,8 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
 */
 public void setIncludePropertyKeysFromFiles(String[] includePropertyKeysFromFiles) {
 if (includePropertyKeysFromFiles != null) {
-this.includePropertyKeysFromFiles =
-Arrays.copyOf(includePropertyKeysFromFiles, includePropertyKeysFromFiles.length);
+this.includePropertyKeysFromFiles = Arrays
+.copyOf(includePropertyKeysFromFiles, includePropertyKeysFromFiles.length);
 }
 }
 }

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.maven.plugin.properties;
 
 import static eu.dnetlib.maven.plugin.properties.GenerateOoziePropertiesMojo.PROPERTY_NAME_SANDBOX_NAME;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.maven.plugin.properties;
 
 import static eu.dnetlib.maven.plugin.properties.WritePredefinedProjectProperties.PROPERTY_PREFIX_ENV;

@@ -20,7 +21,8 @@ import org.mockito.junit.jupiter.MockitoExtension;
 @ExtendWith(MockitoExtension.class)
 public class WritePredefinedProjectPropertiesTest {
 
-@Mock private MavenProject mavenProject;
+@Mock
+private MavenProject mavenProject;
 
 private WritePredefinedProjectProperties mojo;
 

@@ -145,7 +147,9 @@ public class WritePredefinedProjectPropertiesTest {
 includedProperties.setProperty(includedKey, "irrelevantValue");
 includedProperties.store(new FileWriter(includedPropertiesFile), null);
 
-mojo.setIncludePropertyKeysFromFiles(new String[] {includedPropertiesFile.getAbsolutePath()});
+mojo.setIncludePropertyKeysFromFiles(new String[] {
+includedPropertiesFile.getAbsolutePath()
+});
 
 // execute
 mojo.execute();

@@ -171,8 +175,11 @@ public class WritePredefinedProjectPropertiesTest {
 projectProperties.setProperty(includedKey, includedValue);
 doReturn(projectProperties).when(mavenProject).getProperties();
 
-mojo.setIncludePropertyKeysFromFiles(
-new String[] {"/eu/dnetlib/maven/plugin/properties/included.properties"});
+mojo
+.setIncludePropertyKeysFromFiles(
+new String[] {
+"/eu/dnetlib/maven/plugin/properties/included.properties"
+});
 
 // execute
 mojo.execute();

@@ -197,7 +204,9 @@ public class WritePredefinedProjectPropertiesTest {
 projectProperties.setProperty(includedKey, includedValue);
 doReturn(projectProperties).when(mavenProject).getProperties();
 
-mojo.setIncludePropertyKeysFromFiles(new String[] {""});
+mojo.setIncludePropertyKeysFromFiles(new String[] {
+""
+});
 
 // execute
 Assertions.assertThrows(MojoExecutionException.class, () -> mojo.execute());

@@ -221,7 +230,9 @@ public class WritePredefinedProjectPropertiesTest {
 includedProperties.setProperty(includedKey, "irrelevantValue");
 includedProperties.storeToXML(new FileOutputStream(includedPropertiesFile), null);
 
-mojo.setIncludePropertyKeysFromFiles(new String[] {includedPropertiesFile.getAbsolutePath()});
+mojo.setIncludePropertyKeysFromFiles(new String[] {
+includedPropertiesFile.getAbsolutePath()
+});
 
 // execute
 mojo.execute();

@@ -252,7 +263,9 @@ public class WritePredefinedProjectPropertiesTest {
 includedProperties.setProperty(includedKey, "irrelevantValue");
 includedProperties.store(new FileOutputStream(includedPropertiesFile), null);
 
-mojo.setIncludePropertyKeysFromFiles(new String[] {includedPropertiesFile.getAbsolutePath()});
+mojo.setIncludePropertyKeysFromFiles(new String[] {
+includedPropertiesFile.getAbsolutePath()
+});
 
 // execute
 Assertions.assertThrows(MojoExecutionException.class, () -> mojo.execute());

@@ -262,7 +275,9 @@ public class WritePredefinedProjectPropertiesTest {
 public void testExecuteWithQuietModeOn(@TempDir File testFolder) throws Exception {
 // given
 mojo.setQuiet(true);
-mojo.setIncludePropertyKeysFromFiles(new String[] {"invalid location"});
+mojo.setIncludePropertyKeysFromFiles(new String[] {
+"invalid location"
+});
 
 // execute
 mojo.execute();

@@ -276,7 +291,9 @@ public class WritePredefinedProjectPropertiesTest {
 @Test
 public void testExecuteIncludingPropertyKeysFromInvalidFile() {
 // given
-mojo.setIncludePropertyKeysFromFiles(new String[] {"invalid location"});
+mojo.setIncludePropertyKeysFromFiles(new String[] {
+"invalid location"
+});
 
 // execute
 Assertions.assertThrows(MojoExecutionException.class, () -> mojo.execute());

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.collector.worker.model;
 
 import java.util.HashMap;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.data.mdstore.manager.common.model;
 
 import java.io.Serializable;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.data.mdstore.manager.common.model;
 
 import java.io.Serializable;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.data.mdstore.manager.common.model;
 
 import java.io.Serializable;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.data.mdstore.manager.common.model;
 
 import java.io.Serializable;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.application;
 
 import com.fasterxml.jackson.databind.ObjectMapper;

@@ -21,8 +22,7 @@ public class ArgumentApplicationParser implements Serializable {
 
 public ArgumentApplicationParser(final String json_configuration) throws Exception {
 final ObjectMapper mapper = new ObjectMapper();
-final OptionsParameter[] configuration =
-mapper.readValue(json_configuration, OptionsParameter[].class);
+final OptionsParameter[] configuration = mapper.readValue(json_configuration, OptionsParameter[].class);
 createOptionMap(configuration);
 }
 

@@ -32,7 +32,8 @@ public class ArgumentApplicationParser implements Serializable {
 
 private void createOptionMap(final OptionsParameter[] configuration) {
 
-Arrays.stream(configuration)
+Arrays
+.stream(configuration)
 .map(
 conf -> {
 final Option o = new Option(conf.getParamName(), true, conf.getParamDescription());

@@ -74,10 +75,11 @@ public class ArgumentApplicationParser implements Serializable {
 public void parseArgument(final String[] args) throws Exception {
 CommandLineParser parser = new BasicParser();
 CommandLine cmd = parser.parse(options, args);
-Arrays.stream(cmd.getOptions())
+Arrays
+.stream(cmd.getOptions())
 .forEach(
-it ->
-objectMap.put(
+it -> objectMap
+.put(
 it.getLongOpt(),
 compressedValues.contains(it.getLongOpt())
 ? decompressValue(it.getValue())

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.application;
 
 public class OptionsParameter {

@@ -8,7 +9,8 @@ public class OptionsParameter {
 private boolean paramRequired;
 private boolean compressed;
 
-public OptionsParameter() {}
+public OptionsParameter() {
+}
 
 public String getParamName() {
 return paramName;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.common;
 
 import java.io.Serializable;

@@ -6,16 +7,18 @@ import java.util.function.Supplier;
 /** Provides serializable and throwing extensions to standard functional interfaces. */
 public class FunctionalInterfaceSupport {
 
-private FunctionalInterfaceSupport() {}
+private FunctionalInterfaceSupport() {
+}
 
 /**
-* Serializable supplier of any kind of objects. To be used withing spark processing pipelines
-* when supplying functions externally.
+* Serializable supplier of any kind of objects. To be used withing spark processing pipelines when supplying
+* functions externally.
 *
 * @param <T>
 */
 @FunctionalInterface
-public interface SerializableSupplier<T> extends Supplier<T>, Serializable {}
+public interface SerializableSupplier<T> extends Supplier<T>, Serializable {
+}
 
 /**
 * Extension of consumer accepting functions throwing an exception.

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.common;
 
 import static eu.dnetlib.dhp.common.ThrowingSupport.rethrowAsRuntimeException;

@@ -16,7 +17,8 @@ import org.slf4j.LoggerFactory;
 public class HdfsSupport {
 private static final Logger logger = LoggerFactory.getLogger(HdfsSupport.class);
 
-private HdfsSupport() {}
+private HdfsSupport() {
+}
 
 /**
 * Checks a path (file or dir) exists on HDFS.

@@ -62,8 +64,8 @@ public class HdfsSupport {
 public static List<String> listFiles(String path, Configuration configuration) {
 logger.info("Listing files in path: {}", path);
 return rethrowAsRuntimeException(
-() ->
-Arrays.stream(FileSystem.get(configuration).listStatus(new Path(path)))
+() -> Arrays
+.stream(FileSystem.get(configuration).listStatus(new Path(path)))
 .filter(FileStatus::isDirectory)
 .map(x -> x.getPath().toString())
 .collect(Collectors.toList()));

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.common;
 
 import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.ThrowingConsumer;

@@ -9,12 +10,12 @@ import org.apache.spark.sql.SparkSession;
 /** SparkSession utility methods. */
 public class SparkSessionSupport {
 
-private SparkSessionSupport() {}
+private SparkSessionSupport() {
+}
 
 /**
-* Runs a given function using SparkSession created using default builder and supplied SparkConf.
-* Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession created
-* externally.
+* Runs a given function using SparkSession created using default builder and supplied SparkConf. Stops SparkSession
+* when SparkSession is managed. Allows to reuse SparkSession created externally.
 *
 * @param conf SparkConf instance
 * @param isSparkSessionManaged When true will stop SparkSession

@@ -27,9 +28,8 @@ public class SparkSessionSupport {
 }
 
 /**
-* Runs a given function using SparkSession created with hive support and using default builder
-* and supplied SparkConf. Stops SparkSession when SparkSession is managed. Allows to reuse
-* SparkSession created externally.
+* Runs a given function using SparkSession created with hive support and using default builder and supplied
+* SparkConf. Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession created externally.
 *
 * @param conf SparkConf instance
 * @param isSparkSessionManaged When true will stop SparkSession

@@ -45,9 +45,8 @@ public class SparkSessionSupport {
 }
 
 /**
-* Runs a given function using SparkSession created using supplied builder and supplied SparkConf.
-* Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession created
-* externally.
+* Runs a given function using SparkSession created using supplied builder and supplied SparkConf. Stops
+* SparkSession when SparkSession is managed. Allows to reuse SparkSession created externally.
 *
 * @param sparkSessionBuilder Builder of SparkSession
 * @param conf SparkConf instance

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.common;
 
 import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.ThrowingRunnable;

@@ -6,7 +7,8 @@ import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.ThrowingSupplier;
 /** Exception handling utility methods. */
 public class ThrowingSupport {
 
-private ThrowingSupport() {}
+private ThrowingSupport() {
+}
 
 /**
 * Executes given runnable and rethrows any exceptions as RuntimeException.

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.model.mdstore;
 
 import eu.dnetlib.dhp.utils.DHPUtils;

@@ -16,8 +17,7 @@ public class MetadataRecord implements Serializable {
 private String encoding;
 
 /**
-* The information about the provenance of the record see @{@link Provenance} for the model of
-* this information
+* The information about the provenance of the record see @{@link Provenance} for the model of this information
 */
 private Provenance provenance;
 

@@ -1,11 +1,13 @@
+
 package eu.dnetlib.dhp.model.mdstore;
 
 import java.io.Serializable;
 
 /**
 * @author Sandro La Bruzzo
-* <p>Provenace class models the provenance of the record in the metadataStore It contains the
-* identifier and the name of the datasource that gives the record
+* <p>
+* Provenace class models the provenance of the record in the metadataStore It contains the identifier and the
+* name of the datasource that gives the record
 */
 public class Provenance implements Serializable {
 

@@ -15,7 +17,8 @@ public class Provenance implements Serializable {
 
 private String nsPrefix;
 
-public Provenance() {}
+public Provenance() {
+}
 
 public Provenance(String datasourceId, String datasourceName, String nsPrefix) {
 this.datasourceId = datasourceId;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.parser.utility;
 
 public class VtdException extends Exception {

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.parser.utility;
 
 import com.ximpleware.AutoPilot;

@@ -36,7 +37,8 @@ public class VtdUtilityParser {
 final Map<String, String> currentAttributes = new HashMap<>();
 if (attributes != null) {
 
-attributes.forEach(
+attributes
+.forEach(
 attributeKey -> {
 try {
 int attr = vn.getAttrVal(attributeKey);

@@ -58,7+60,8 @@ public class VtdUtilityParser {
 ap.selectXPath(xpath);
 while (ap.evalXPath() != -1) {
 int t = vn.getText();
-if (t > -1) results.add(vn.toNormalizedString(t));
+if (t > -1)
+results.add(vn.toNormalizedString(t));
 }
 return results;
 } catch (Exception e) {

@@ -72,7 +75,8 @@ public class VtdUtilityParser {
 ap.selectXPath(xpath);
 while (ap.evalXPath() != -1) {
 int it = nav.getText();
-if (it > -1) return nav.toNormalizedString(it);
+if (it > -1)
+return nav.toNormalizedString(it);
 }
 return null;
 } catch (Exception e) {

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.utils;
 
 import com.jayway.jsonpath.JsonPath;

@@ -59,7 +60,8 @@ public class DHPUtils {
 public static String getJPathString(final String jsonPath, final String json) {
 try {
 Object o = JsonPath.read(json, jsonPath);
-if (o instanceof String) return (String) o;
+if (o instanceof String)
+return (String) o;
 if (o instanceof JSONArray && ((JSONArray) o).size() > 0)
 return (String) ((JSONArray) o).get(0);
 return o.toString();

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.utils;
 
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.utils.saxon;
 
 import net.sf.saxon.expr.XPathContext;

@@ -9,8 +10,7 @@ import net.sf.saxon.trans.XPathException;
 
 public abstract class AbstractExtensionFunction extends ExtensionFunctionDefinition {
 
-public static String DEFAULT_SAXON_EXT_NS_URI =
-"http://www.d-net.research-infrastructures.eu/saxon-extension";
+public static String DEFAULT_SAXON_EXT_NS_URI = "http://www.d-net.research-infrastructures.eu/saxon-extension";
 
 public abstract String getName();
 

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.utils.saxon;
 
 import java.text.ParseException;

@@ -13,7 +14,9 @@ import net.sf.saxon.value.StringValue;
 
 public class ExtractYear extends AbstractExtensionFunction {
 
-private static final String[] dateFormats = {"yyyy-MM-dd", "yyyy/MM/dd"};
+private static final String[] dateFormats = {
+"yyyy-MM-dd", "yyyy/MM/dd"
+};
 
 @Override
 public String getName() {

@@ -44,7 +47,9 @@ public class ExtractYear extends AbstractExtensionFunction {
 
 @Override
 public SequenceType[] getArgumentTypes() {
-return new SequenceType[] {SequenceType.OPTIONAL_ITEM};
+return new SequenceType[] {
+SequenceType.OPTIONAL_ITEM
+};
 }
 
 @Override

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.utils.saxon;
 
 import java.text.ParseException;

@@ -43,7 +44,9 @@ public class NormalizeDate extends AbstractExtensionFunction {
 
 @Override
 public SequenceType[] getArgumentTypes() {
-return new SequenceType[] {SequenceType.OPTIONAL_ITEM};
+return new SequenceType[] {
+SequenceType.OPTIONAL_ITEM
+};
 }
 
 @Override

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.utils.saxon;
 
 import net.sf.saxon.expr.XPathContext;

@@ -49,7 +50,9 @@ public class PickFirst extends AbstractExtensionFunction {
 
 @Override
 public SequenceType[] getArgumentTypes() {
-return new SequenceType[] {SequenceType.OPTIONAL_ITEM};
+return new SequenceType[] {
+SequenceType.OPTIONAL_ITEM
+};
 }
 
 @Override

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.utils.saxon;
 
 import java.io.StringReader;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.message;
 
 import com.fasterxml.jackson.core.JsonProcessingException;

@@ -20,7 +21,8 @@ public class Message {
 return jsonMapper.readValue(json, Message.class);
 }
 
-public Message() {}
+public Message() {
+}
 
 public Message(String workflowId, String jobName, MessageType type, Map<String, String> body) {
 this.workflowId = workflowId;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.message;
 
 import com.rabbitmq.client.AMQP;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.message;
 
 import com.rabbitmq.client.Channel;

@@ -1,6 +1,6 @@
+
 package eu.dnetlib.message;
 
 public enum MessageType {
-ONGOING,
-REPORT
+ONGOING, REPORT
 }

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.scholexplorer.relation;
 
 import java.io.Serializable;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.scholexplorer.relation;
 
 import com.fasterxml.jackson.databind.ObjectMapper;

@@ -9,8 +10,7 @@ public class RelationMapper extends HashMap<String, RelInfo> implements Serializ
 
 public static RelationMapper load() throws Exception {
 
-final String json =
-IOUtils.toString(RelationMapper.class.getResourceAsStream("relations.json"));
+final String json = IOUtils.toString(RelationMapper.class.getResourceAsStream("relations.json"));
 
 ObjectMapper mapper = new ObjectMapper();
 return mapper.readValue(json, RelationMapper.class);

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.application;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;

@@ -10,12 +11,13 @@ public class ArgumentApplicationParserTest {
 
 @Test
 public void testParseParameter() throws Exception {
-final String jsonConfiguration =
-IOUtils.toString(
+final String jsonConfiguration = IOUtils
+.toString(
 this.getClass().getResourceAsStream("/eu/dnetlib/application/parameters.json"));
 assertNotNull(jsonConfiguration);
 ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
-parser.parseArgument(
+parser
+.parseArgument(
 new String[] {
 "-p",
 "value0",

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.common;
 
 import static org.junit.jupiter.api.Assertions.*;

@@ -65,8 +66,9 @@ public class HdfsSupportTest {
 
 // then
 assertEquals(2, paths.size());
-List<String> expecteds =
-Arrays.stream(new String[] {subDir1.toString(), subDir2.toString()})
+List<String> expecteds = Arrays.stream(new String[] {
+subDir1.toString(), subDir2.toString()
+})
 .sorted()
 .collect(Collectors.toList());
 List<String> actuals = paths.stream().sorted().collect(Collectors.toList());

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.common;
 
 import static org.mockito.Mockito.*;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.model.mdstore;
 
 import static org.junit.jupiter.api.Assertions.assertTrue;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.message;
 
 import static org.junit.jupiter.api.Assertions.*;

@@ -33,8 +34,7 @@ public class MessageTest {
 
 @Test
 public void toStringTest() {
-final String expectedJson =
-"{\"workflowId\":\"wId\",\"jobName\":\"Collection\",\"type\":\"ONGOING\",\"body\":{\"ExecutionTime\":\"30s\",\"parsedItem\":\"300\"}}";
+final String expectedJson = "{\"workflowId\":\"wId\",\"jobName\":\"Collection\",\"type\":\"ONGOING\",\"body\":{\"ExecutionTime\":\"30s\",\"parsedItem\":\"300\"}}";
 Message m = new Message();
 m.setWorkflowId("wId");
 m.setType(MessageType.ONGOING);

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.scholexplorer.relation;
 
 import org.junit.jupiter.api.Test;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.action;
 
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize;

@@ -11,7 +12,8 @@ public class AtomicAction<T extends Oaf> implements Serializable {
 
 private T payload;
 
-public AtomicAction() {}
+public AtomicAction() {
+}
 
 public AtomicAction(Class<T> clazz, T payload) {
 this.clazz = clazz;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.action;
 
 import com.fasterxml.jackson.core.JsonParser;

@@ -1,16 +1,11 @@
+
 package eu.dnetlib.dhp.schema.common;
 
 import eu.dnetlib.dhp.schema.oaf.OafEntity;
 
 /** Actual entity types in the Graph */
 public enum EntityType {
-publication,
-dataset,
-otherresearchproduct,
-software,
-datasource,
-organization,
-project;
+publication, dataset, otherresearchproduct, software, datasource, organization, project;
 
 /**
 * Resolves the EntityType, given the relative class name

@@ -1,9 +1,7 @@
+
 package eu.dnetlib.dhp.schema.common;
 
 /** Main entity types in the Graph */
 public enum MainEntityType {
-result,
-datasource,
-organization,
-project
+result, datasource, organization, project
 }

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.common;
 
 import eu.dnetlib.dhp.schema.oaf.Qualifier;

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.common;
 
 import com.google.common.collect.Maps;

@@ -52,7 +53,8 @@ public class ModelSupport {
 
 private static final String schemeTemplate = "dnet:%s_%s_relations";
 
-private ModelSupport() {}
+private ModelSupport() {
+}
 
 /**
 * Checks subclass-superclass relationship.

@@ -142,7 +144,8 @@ public class ModelSupport {
 }
 
 public static String getScheme(final String sourceType, final String targetType) {
-return String.format(
+return String
+.format(
 schemeTemplate,
 entityMapping.get(EntityType.valueOf(sourceType)).name(),
 entityMapping.get(EntityType.valueOf(targetType)).name());

@@ -159,29 +162,31 @@ public class ModelSupport {
 
 private static <T extends Oaf> String idFnForRelation(T t) {
 Relation r = (Relation) t;
-return Optional.ofNullable(r.getSource())
+return Optional
+.ofNullable(r.getSource())
 .map(
-source ->
-Optional.ofNullable(r.getTarget())
+source -> Optional
+.ofNullable(r.getTarget())
 .map(
-target ->
-Optional.ofNullable(r.getRelType())
+target -> Optional
+.ofNullable(r.getRelType())
 .map(
-relType ->
-Optional.ofNullable(r.getSubRelType())
+relType -> Optional
+.ofNullable(r.getSubRelType())
 .map(
-subRelType ->
-Optional.ofNullable(r.getRelClass())
+subRelType -> Optional
+.ofNullable(r.getRelClass())
 .map(
-relClass ->
-String.join(
+relClass -> String
+.join(
 source,
 target,
 relType,
 subRelType,
 relClass))
 .orElse(
-String.join(
+String
+.join(
 source,
 target,
 relType,

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;
 
 import java.io.Serializable;

@@ -68,8 +69,10 @@ public class Author implements Serializable {
 
 @Override
 public boolean equals(Object o) {
-if (this == o) return true;
-if (o == null || getClass() != o.getClass()) return false;
+if (this == o)
+return true;
+if (o == null || getClass() != o.getClass())
+return false;
 Author author = (Author) o;
 return Objects.equals(fullname, author.fullname)
 && Objects.equals(name, author.name)

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;
 
 import java.io.Serializable;

@@ -31,9 +32,12 @@ public class Context implements Serializable {
 
 @Override
 public boolean equals(Object obj) {
-if (this == obj) return true;
-if (obj == null) return false;
-if (getClass() != obj.getClass()) return false;
+if (this == obj)
+return true;
+if (obj == null)
+return false;
+if (getClass() != obj.getClass())
+return false;
 
 Context other = (Context) obj;
 

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;
 
 import java.util.Objects;

@@ -16,9 +17,12 @@ public class Country extends Qualifier {
 
 @Override
 public boolean equals(Object o) {
-if (this == o) return true;
-if (o == null || getClass() != o.getClass()) return false;
-if (!super.equals(o)) return false;
+if (this == o)
+return true;
+if (o == null || getClass() != o.getClass())
+return false;
+if (!super.equals(o))
+return false;
 Country country = (Country) o;
 return Objects.equals(dataInfo, country.dataInfo);
 }

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;
 
 import java.io.Serializable;

@@ -62,8 +63,10 @@ public class DataInfo implements Serializable {
 
 @Override
 public boolean equals(Object o) {
-if (this == o) return true;
-if (o == null || getClass() != o.getClass()) return false;
+if (this == o)
+return true;
+if (o == null || getClass() != o.getClass())
+return false;
 DataInfo dataInfo = (DataInfo) o;
 return Objects.equals(invisible, dataInfo.invisible)
 && Objects.equals(inferred, dataInfo.inferred)

@@ -75,7 +78,8 @@ public class DataInfo implements Serializable {
 
 @Override
 public int hashCode() {
-return Objects.hash(
+return Objects
+.hash(
 invisible, inferred, deletedbyinference, trust, inferenceprovenance, provenanceaction);
 }
 }

@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;
 
 import eu.dnetlib.dhp.schema.common.ModelConstants;

@@ -90,8 +91,7 @@ public class Dataset extends Result implements Serializable {
 
 final Dataset d = (Dataset) e;
 
-storagedate =
-d.getStoragedate() != null && compareTrust(this, e) < 0 ? d.getStoragedate() : storagedate;
+storagedate = d.getStoragedate() != null && compareTrust(this, e) < 0 ? d.getStoragedate() : storagedate;
 
 device = d.getDevice() != null && compareTrust(this, e) < 0 ? d.getDevice() : device;
 

@@ -99,13 +99,11 @@ public class Dataset extends Result implements Serializable {
 
 version = d.getVersion() != null && compareTrust(this, e) < 0 ? d.getVersion() : version;
 
-lastmetadataupdate =
-d.getLastmetadataupdate() != null && compareTrust(this, e) < 0
+lastmetadataupdate = d.getLastmetadataupdate() != null && compareTrust(this, e) < 0
 ? d.getLastmetadataupdate()
 : lastmetadataupdate;
 
-metadataversionnumber =
-d.getMetadataversionnumber() != null && compareTrust(this, e) < 0
+metadataversionnumber = d.getMetadataversionnumber() != null && compareTrust(this, e) < 0
 ? d.getMetadataversionnumber()
 : metadataversionnumber;
 
@ -1,3 +1,4 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.schema.oaf;
|
package eu.dnetlib.dhp.schema.oaf;
|
||||||
|
|
||||||
import java.io.Serializable;
|
import java.io.Serializable;
|
||||||
|
@ -372,120 +373,93 @@ public class Datasource extends OafEntity implements Serializable {
|
||||||
|
|
||||||
Datasource d = (Datasource) e;
|
Datasource d = (Datasource) e;
|
||||||
|
|
||||||
datasourcetype =
|
datasourcetype = d.getDatasourcetype() != null && compareTrust(this, e) < 0
|
||||||
d.getDatasourcetype() != null && compareTrust(this, e) < 0
|
|
||||||
? d.getDatasourcetype()
|
? d.getDatasourcetype()
|
||||||
: datasourcetype;
|
: datasourcetype;
|
||||||
openairecompatibility =
|
openairecompatibility = d.getOpenairecompatibility() != null && compareTrust(this, e) < 0
|
||||||
d.getOpenairecompatibility() != null && compareTrust(this, e) < 0
|
|
||||||
? d.getOpenairecompatibility()
|
? d.getOpenairecompatibility()
|
||||||
: openairecompatibility;
|
: openairecompatibility;
|
||||||
officialname =
|
officialname = d.getOfficialname() != null && compareTrust(this, e) < 0
|
||||||
d.getOfficialname() != null && compareTrust(this, e) < 0
|
|
||||||
? d.getOfficialname()
|
? d.getOfficialname()
|
||||||
: officialname;
|
: officialname;
|
||||||
englishname =
|
englishname = d.getEnglishname() != null && compareTrust(this, e) < 0 ? d.getEnglishname() : officialname;
|
||||||
d.getEnglishname() != null && compareTrust(this, e) < 0 ? d.getEnglishname() : officialname;
|
websiteurl = d.getWebsiteurl() != null && compareTrust(this, e) < 0 ? d.getWebsiteurl() : websiteurl;
|
||||||
websiteurl =
|
|
||||||
d.getWebsiteurl() != null && compareTrust(this, e) < 0 ? d.getWebsiteurl() : websiteurl;
|
|
||||||
logourl = d.getLogourl() != null && compareTrust(this, e) < 0 ? d.getLogourl() : getLogourl();
|
logourl = d.getLogourl() != null && compareTrust(this, e) < 0 ? d.getLogourl() : getLogourl();
|
||||||
contactemail =
|
contactemail = d.getContactemail() != null && compareTrust(this, e) < 0
|
||||||
d.getContactemail() != null && compareTrust(this, e) < 0
|
|
||||||
? d.getContactemail()
|
? d.getContactemail()
|
||||||
: contactemail;
|
: contactemail;
|
||||||
namespaceprefix =
|
namespaceprefix = d.getNamespaceprefix() != null && compareTrust(this, e) < 0
|
||||||
d.getNamespaceprefix() != null && compareTrust(this, e) < 0
|
|
||||||
? d.getNamespaceprefix()
|
? d.getNamespaceprefix()
|
||||||
: namespaceprefix;
|
: namespaceprefix;
|
||||||
         latitude = d.getLatitude() != null && compareTrust(this, e) < 0 ? d.getLatitude() : latitude;
-        longitude =
-            d.getLongitude() != null && compareTrust(this, e) < 0 ? d.getLongitude() : longitude;
+        longitude = d.getLongitude() != null && compareTrust(this, e) < 0 ? d.getLongitude() : longitude;
-        dateofvalidation =
-            d.getDateofvalidation() != null && compareTrust(this, e) < 0
-                ? d.getDateofvalidation()
-                : dateofvalidation;
+        dateofvalidation = d.getDateofvalidation() != null && compareTrust(this, e) < 0
+            ? d.getDateofvalidation()
+            : dateofvalidation;
-        description =
-            d.getDescription() != null && compareTrust(this, e) < 0 ? d.getDescription() : description;
+        description = d.getDescription() != null && compareTrust(this, e) < 0 ? d.getDescription() : description;
         subjects = mergeLists(subjects, d.getSubjects());

         // opendoar specific fields (od*)
-        odnumberofitems =
-            d.getOdnumberofitems() != null && compareTrust(this, e) < 0
-                ? d.getOdnumberofitems()
-                : odnumberofitems;
+        odnumberofitems = d.getOdnumberofitems() != null && compareTrust(this, e) < 0
+            ? d.getOdnumberofitems()
+            : odnumberofitems;
-        odnumberofitemsdate =
-            d.getOdnumberofitemsdate() != null && compareTrust(this, e) < 0
-                ? d.getOdnumberofitemsdate()
-                : odnumberofitemsdate;
+        odnumberofitemsdate = d.getOdnumberofitemsdate() != null && compareTrust(this, e) < 0
+            ? d.getOdnumberofitemsdate()
+            : odnumberofitemsdate;
-        odpolicies =
-            d.getOdpolicies() != null && compareTrust(this, e) < 0 ? d.getOdpolicies() : odpolicies;
+        odpolicies = d.getOdpolicies() != null && compareTrust(this, e) < 0 ? d.getOdpolicies() : odpolicies;
         odlanguages = mergeLists(odlanguages, d.getOdlanguages());
         odcontenttypes = mergeLists(odcontenttypes, d.getOdcontenttypes());
         accessinfopackage = mergeLists(accessinfopackage, d.getAccessinfopackage());

         // re3data fields
-        releasestartdate =
-            d.getReleasestartdate() != null && compareTrust(this, e) < 0
-                ? d.getReleasestartdate()
-                : releasestartdate;
+        releasestartdate = d.getReleasestartdate() != null && compareTrust(this, e) < 0
+            ? d.getReleasestartdate()
+            : releasestartdate;
-        releaseenddate =
-            d.getReleaseenddate() != null && compareTrust(this, e) < 0
-                ? d.getReleaseenddate()
-                : releaseenddate;
+        releaseenddate = d.getReleaseenddate() != null && compareTrust(this, e) < 0
+            ? d.getReleaseenddate()
+            : releaseenddate;
-        missionstatementurl =
-            d.getMissionstatementurl() != null && compareTrust(this, e) < 0
-                ? d.getMissionstatementurl()
-                : missionstatementurl;
+        missionstatementurl = d.getMissionstatementurl() != null && compareTrust(this, e) < 0
+            ? d.getMissionstatementurl()
+            : missionstatementurl;
-        dataprovider =
-            d.getDataprovider() != null && compareTrust(this, e) < 0
-                ? d.getDataprovider()
-                : dataprovider;
+        dataprovider = d.getDataprovider() != null && compareTrust(this, e) < 0
+            ? d.getDataprovider()
+            : dataprovider;
-        serviceprovider =
-            d.getServiceprovider() != null && compareTrust(this, e) < 0
-                ? d.getServiceprovider()
-                : serviceprovider;
+        serviceprovider = d.getServiceprovider() != null && compareTrust(this, e) < 0
+            ? d.getServiceprovider()
+            : serviceprovider;

         // {open, restricted or closed}
-        databaseaccesstype =
-            d.getDatabaseaccesstype() != null && compareTrust(this, e) < 0
-                ? d.getDatabaseaccesstype()
-                : databaseaccesstype;
+        databaseaccesstype = d.getDatabaseaccesstype() != null && compareTrust(this, e) < 0
+            ? d.getDatabaseaccesstype()
+            : databaseaccesstype;

         // {open, restricted or closed}
-        datauploadtype =
-            d.getDatauploadtype() != null && compareTrust(this, e) < 0
-                ? d.getDatauploadtype()
-                : datauploadtype;
+        datauploadtype = d.getDatauploadtype() != null && compareTrust(this, e) < 0
+            ? d.getDatauploadtype()
+            : datauploadtype;

         // {feeRequired, registration, other}
-        databaseaccessrestriction =
-            d.getDatabaseaccessrestriction() != null && compareTrust(this, e) < 0
-                ? d.getDatabaseaccessrestriction()
-                : databaseaccessrestriction;
+        databaseaccessrestriction = d.getDatabaseaccessrestriction() != null && compareTrust(this, e) < 0
+            ? d.getDatabaseaccessrestriction()
+            : databaseaccessrestriction;

         // {feeRequired, registration, other}
-        datauploadrestriction =
-            d.getDatauploadrestriction() != null && compareTrust(this, e) < 0
-                ? d.getDatauploadrestriction()
-                : datauploadrestriction;
+        datauploadrestriction = d.getDatauploadrestriction() != null && compareTrust(this, e) < 0
+            ? d.getDatauploadrestriction()
+            : datauploadrestriction;

-        versioning =
-            d.getVersioning() != null && compareTrust(this, e) < 0 ? d.getVersioning() : versioning;
+        versioning = d.getVersioning() != null && compareTrust(this, e) < 0 ? d.getVersioning() : versioning;
-        citationguidelineurl =
-            d.getCitationguidelineurl() != null && compareTrust(this, e) < 0
-                ? d.getCitationguidelineurl()
-                : citationguidelineurl;
+        citationguidelineurl = d.getCitationguidelineurl() != null && compareTrust(this, e) < 0
+            ? d.getCitationguidelineurl()
+            : citationguidelineurl;

         // {yes, no, unknown}
-        qualitymanagementkind =
-            d.getQualitymanagementkind() != null && compareTrust(this, e) < 0
-                ? d.getQualitymanagementkind()
-                : qualitymanagementkind;
+        qualitymanagementkind = d.getQualitymanagementkind() != null && compareTrust(this, e) < 0
+            ? d.getQualitymanagementkind()
+            : qualitymanagementkind;
-        pidsystems =
-            d.getPidsystems() != null && compareTrust(this, e) < 0 ? d.getPidsystems() : pidsystems;
+        pidsystems = d.getPidsystems() != null && compareTrust(this, e) < 0 ? d.getPidsystems() : pidsystems;

-        certificates =
-            d.getCertificates() != null && compareTrust(this, e) < 0
-                ? d.getCertificates()
-                : certificates;
+        certificates = d.getCertificates() != null && compareTrust(this, e) < 0
+            ? d.getCertificates()
+            : certificates;
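Every assignment in the hunk above applies the same merge rule: take the incoming field only when it is non-null and the incoming record is more trusted than the current one (compareTrust(this, e) < 0). A minimal, self-contained sketch of that rule; the helper name and the numeric trust arguments are hypothetical, not part of the patch:

public class TrustMergeSketch {

    // Stands in for the "x = d.getX() != null && compareTrust(this, e) < 0 ? d.getX() : x" idiom.
    static <T> T preferIfMoreTrusted(T incoming, T current, double currentTrust, double incomingTrust) {
        return incoming != null && currentTrust < incomingTrust ? incoming : current;
    }

    public static void main(String[] args) {
        String latitude = "41.9";              // current value, trust 0.5
        String incoming = "41.902782";         // incoming value, trust 0.9
        latitude = preferIfMoreTrusted(incoming, latitude, 0.5, 0.9);
        System.out.println(latitude);          // prints 41.902782
    }
}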
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -94,8 +95,10 @@ public class ExternalReference implements Serializable {

     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o)
+            return true;
+        if (o == null || getClass() != o.getClass())
+            return false;
         ExternalReference that = (ExternalReference) o;
         return Objects.equals(sitename, that.sitename)
             && Objects.equals(label, that.label)

@@ -109,7 +112,8 @@ public class ExternalReference implements Serializable {

     @Override
     public int hashCode() {
-        return Objects.hash(
-            sitename, label, url, description, qualifier, refidentifier, query, dataInfo);
+        return Objects
+            .hash(
+                sitename, label, url, description, qualifier, refidentifier, query, dataInfo);
     }
 }
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -57,8 +58,10 @@ public class ExtraInfo implements Serializable {

     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o)
+            return true;
+        if (o == null || getClass() != o.getClass())
+            return false;
         ExtraInfo extraInfo = (ExtraInfo) o;
         return Objects.equals(name, extraInfo.name)
             && Objects.equals(typology, extraInfo.typology)
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -31,9 +32,12 @@ public class Field<T> implements Serializable {

     @Override
     public boolean equals(Object obj) {
-        if (this == obj) return true;
-        if (obj == null) return false;
-        if (getClass() != obj.getClass()) return false;
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;
         Field<T> other = (Field<T>) obj;
         return getValue().equals(other.getValue());
     }
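The same three-branch equals rewrite recurs in most of the classes touched by this commit: the one-line guards become guard-plus-indented-return pairs. A compact, runnable illustration of the pattern on a hypothetical value type (not a class from this repository):

import java.util.Objects;

public class EqualsPatternSketch {
    private final String value;

    EqualsPatternSketch(String value) {
        this.value = value;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        EqualsPatternSketch other = (EqualsPatternSketch) obj;
        return Objects.equals(value, other.value);
    }

    @Override
    public int hashCode() {
        return Objects.hash(value);
    }

    public static void main(String[] args) {
        System.out.println(new EqualsPatternSketch("x").equals(new EqualsPatternSketch("x"))); // true
    }
}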
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import com.fasterxml.jackson.annotation.JsonIgnore;

@@ -44,7 +45,8 @@ public class GeoLocation implements Serializable {
     public String toComparableString() {
         return isBlank()
             ? ""
-            : String.format(
+            : String
+                .format(
                 "%s::%s%s",
                 point != null ? point.toLowerCase() : "",
                 box != null ? box.toLowerCase() : "",

@@ -58,9 +60,12 @@ public class GeoLocation implements Serializable {

     @Override
     public boolean equals(Object obj) {
-        if (this == obj) return true;
-        if (obj == null) return false;
-        if (getClass() != obj.getClass()) return false;
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;

         GeoLocation other = (GeoLocation) obj;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -121,7 +122,8 @@ public class Instance implements Serializable {
     }

     public String toComparableString() {
-        return String.format(
+        return String
+            .format(
             "%s::%s::%s::%s",
             hostedby != null && hostedby.getKey() != null ? hostedby.getKey().toLowerCase() : "",
             accessright != null && accessright.getClassid() != null ? accessright.getClassid() : "",

@@ -136,9 +138,12 @@ public class Instance implements Serializable {

     @Override
     public boolean equals(Object obj) {
-        if (this == obj) return true;
-        if (obj == null) return false;
-        if (getClass() != obj.getClass()) return false;
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;

         Instance other = (Instance) obj;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -127,8 +128,10 @@ public class Journal implements Serializable {

     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o)
+            return true;
+        if (o == null || getClass() != o.getClass())
+            return false;
         Journal journal = (Journal) o;
         return Objects.equals(name, journal.name)
             && Objects.equals(issnPrinted, journal.issnPrinted)

@@ -146,7 +149,8 @@ public class Journal implements Serializable {

     @Override
     public int hashCode() {
-        return Objects.hash(
+        return Objects
+            .hash(
             name,
             issnPrinted,
             issnOnline,
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import com.fasterxml.jackson.annotation.JsonIgnore;

@@ -39,7 +40,8 @@ public class KeyValue implements Serializable {
     public String toComparableString() {
         return isBlank()
             ? ""
-            : String.format(
+            : String
+                .format(
                 "%s::%s",
                 key != null ? key.toLowerCase() : "", value != null ? value.toLowerCase() : "");
     }

@@ -56,9 +58,12 @@ public class KeyValue implements Serializable {

     @Override
     public boolean equals(Object obj) {
-        if (this == obj) return true;
-        if (obj == null) return false;
-        if (getClass() != obj.getClass()) return false;
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;

         KeyValue other = (KeyValue) obj;
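GeoLocation, Instance, KeyValue and Qualifier all build their equality key the same way: a lower-cased, "::"-joined string in which null parts collapse to the empty string. A standalone sketch of the idea, using hypothetical field names:

public class ComparableStringSketch {

    // Normalized "::"-joined key, as in the toComparableString methods above.
    static String toComparableString(String key, String value) {
        return String
            .format(
                "%s::%s",
                key != null ? key.toLowerCase() : "",
                value != null ? value.toLowerCase() : "");
    }

    public static void main(String[] args) {
        System.out.println(toComparableString("DOI", null)); // doi::
    }
}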
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -17,8 +18,10 @@ public class OAIProvenance implements Serializable {

     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o)
+            return true;
+        if (o == null || getClass() != o.getClass())
+            return false;
         OAIProvenance that = (OAIProvenance) o;
         return Objects.equals(originDescription, that.originDescription);
     }
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -37,11 +38,13 @@ public abstract class Oaf implements Serializable {
     }

     public void mergeOAFDataInfo(Oaf e) {
-        if (e.getDataInfo() != null && compareTrust(this, e) < 0) dataInfo = e.getDataInfo();
+        if (e.getDataInfo() != null && compareTrust(this, e) < 0)
+            dataInfo = e.getDataInfo();
     }

     protected String extractTrust(Oaf e) {
-        if (e == null || e.getDataInfo() == null || e.getDataInfo().getTrust() == null) return "0.0";
+        if (e == null || e.getDataInfo() == null || e.getDataInfo().getTrust() == null)
+            return "0.0";
         return e.getDataInfo().getTrust();
     }

@@ -51,8 +54,10 @@ public abstract class Oaf implements Serializable {

     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o)
+            return true;
+        if (o == null || getClass() != o.getClass())
+            return false;
         Oaf oaf = (Oaf) o;
         return Objects.equals(dataInfo, oaf.dataInfo)
             && Objects.equals(lastupdatetimestamp, oaf.lastupdatetimestamp);
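extractTrust above defaults a missing trust to "0.0"; compareTrust itself is not shown in this hunk, so the float-based comparison in this sketch is an assumption about how the two methods fit together:

public class TrustSketch {

    // Missing dataInfo/trust collapses to "0.0", as in Oaf.extractTrust.
    static String extractTrust(String trustOrNull) {
        return trustOrNull == null ? "0.0" : trustOrNull;
    }

    // Assumed: trusts are compared numerically after parsing.
    static int compareTrust(String a, String b) {
        return Float.compare(Float.parseFloat(extractTrust(a)), Float.parseFloat(extractTrust(b)));
    }

    public static void main(String[] args) {
        System.out.println(compareTrust(null, "0.9")); // -1: the other record wins the merge
    }
}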
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -78,7 +79,8 @@ public abstract class OafEntity extends Oaf implements Serializable {

     public void mergeFrom(OafEntity e) {

-        if (e == null) return;
+        if (e == null)
+            return;

         originalId = mergeLists(originalId, e.getOriginalId());

@@ -100,7 +102,8 @@ public abstract class OafEntity extends Oaf implements Serializable {

     protected <T> List<T> mergeLists(final List<T>... lists) {

-        return Arrays.stream(lists)
+        return Arrays
+            .stream(lists)
             .filter(Objects::nonNull)
             .flatMap(List::stream)
             .distinct()

@@ -109,9 +112,12 @@ public abstract class OafEntity extends Oaf implements Serializable {

     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        if (!super.equals(o)) return false;
+        if (this == o)
+            return true;
+        if (o == null || getClass() != o.getClass())
+            return false;
+        if (!super.equals(o))
+            return false;
         OafEntity oafEntity = (OafEntity) o;
         return Objects.equals(id, oafEntity.id);
     }
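The reformatted mergeLists pipeline is cut off by the hunk just after .distinct(); a plausible completion with an assumed terminal collect(...), runnable as-is:

import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

public class MergeListsSketch {

    // Same pipeline as OafEntity.mergeLists above; the terminal collect is assumed.
    @SafeVarargs
    static <T> List<T> mergeLists(final List<T>... lists) {
        return Arrays
            .stream(lists)
            .filter(Objects::nonNull)      // a null list contributes nothing
            .flatMap(List::stream)         // concatenate the surviving lists
            .distinct()                    // relies on T.equals for dedup
            .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        System.out.println(mergeLists(Arrays.asList("a", "b"), null, Arrays.asList("b", "c")));
        // [a, b, c]
    }
}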
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -175,50 +176,38 @@ public class Organization extends OafEntity implements Serializable {
         }

         final Organization o = (Organization) e;
-        legalshortname =
-            o.getLegalshortname() != null && compareTrust(this, e) < 0
-                ? o.getLegalshortname()
-                : legalshortname;
+        legalshortname = o.getLegalshortname() != null && compareTrust(this, e) < 0
+            ? o.getLegalshortname()
+            : legalshortname;
-        legalname =
-            o.getLegalname() != null && compareTrust(this, e) < 0 ? o.getLegalname() : legalname;
+        legalname = o.getLegalname() != null && compareTrust(this, e) < 0 ? o.getLegalname() : legalname;
         alternativeNames = mergeLists(o.getAlternativeNames(), alternativeNames);
-        websiteurl =
-            o.getWebsiteurl() != null && compareTrust(this, e) < 0 ? o.getWebsiteurl() : websiteurl;
+        websiteurl = o.getWebsiteurl() != null && compareTrust(this, e) < 0 ? o.getWebsiteurl() : websiteurl;
         logourl = o.getLogourl() != null && compareTrust(this, e) < 0 ? o.getLogourl() : logourl;
-        eclegalbody =
-            o.getEclegalbody() != null && compareTrust(this, e) < 0 ? o.getEclegalbody() : eclegalbody;
+        eclegalbody = o.getEclegalbody() != null && compareTrust(this, e) < 0 ? o.getEclegalbody() : eclegalbody;
-        eclegalperson =
-            o.getEclegalperson() != null && compareTrust(this, e) < 0
-                ? o.getEclegalperson()
-                : eclegalperson;
+        eclegalperson = o.getEclegalperson() != null && compareTrust(this, e) < 0
+            ? o.getEclegalperson()
+            : eclegalperson;
-        ecnonprofit =
-            o.getEcnonprofit() != null && compareTrust(this, e) < 0 ? o.getEcnonprofit() : ecnonprofit;
+        ecnonprofit = o.getEcnonprofit() != null && compareTrust(this, e) < 0 ? o.getEcnonprofit() : ecnonprofit;
-        ecresearchorganization =
-            o.getEcresearchorganization() != null && compareTrust(this, e) < 0
-                ? o.getEcresearchorganization()
-                : ecresearchorganization;
+        ecresearchorganization = o.getEcresearchorganization() != null && compareTrust(this, e) < 0
+            ? o.getEcresearchorganization()
+            : ecresearchorganization;
-        echighereducation =
-            o.getEchighereducation() != null && compareTrust(this, e) < 0
-                ? o.getEchighereducation()
-                : echighereducation;
+        echighereducation = o.getEchighereducation() != null && compareTrust(this, e) < 0
+            ? o.getEchighereducation()
+            : echighereducation;
-        ecinternationalorganizationeurinterests =
-            o.getEcinternationalorganizationeurinterests() != null && compareTrust(this, e) < 0
-                ? o.getEcinternationalorganizationeurinterests()
-                : ecinternationalorganizationeurinterests;
+        ecinternationalorganizationeurinterests = o.getEcinternationalorganizationeurinterests() != null
+            && compareTrust(this, e) < 0
+                ? o.getEcinternationalorganizationeurinterests()
+                : ecinternationalorganizationeurinterests;
-        ecinternationalorganization =
-            o.getEcinternationalorganization() != null && compareTrust(this, e) < 0
-                ? o.getEcinternationalorganization()
-                : ecinternationalorganization;
+        ecinternationalorganization = o.getEcinternationalorganization() != null && compareTrust(this, e) < 0
+            ? o.getEcinternationalorganization()
+            : ecinternationalorganization;
-        ecenterprise =
-            o.getEcenterprise() != null && compareTrust(this, e) < 0
-                ? o.getEcenterprise()
-                : ecenterprise;
+        ecenterprise = o.getEcenterprise() != null && compareTrust(this, e) < 0
+            ? o.getEcenterprise()
+            : ecenterprise;
-        ecsmevalidated =
-            o.getEcsmevalidated() != null && compareTrust(this, e) < 0
-                ? o.getEcsmevalidated()
-                : ecsmevalidated;
+        ecsmevalidated = o.getEcsmevalidated() != null && compareTrust(this, e) < 0
+            ? o.getEcsmevalidated()
+            : ecsmevalidated;
-        ecnutscode =
-            o.getEcnutscode() != null && compareTrust(this, e) < 0 ? o.getEcnutscode() : ecnutscode;
+        ecnutscode = o.getEcnutscode() != null && compareTrust(this, e) < 0 ? o.getEcnutscode() : ecnutscode;
         country = o.getCountry() != null && compareTrust(this, e) < 0 ? o.getCountry() : country;
         mergeOAFDataInfo(o);
     }
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -67,8 +68,10 @@ public class OriginDescription implements Serializable {

     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o)
+            return true;
+        if (o == null || getClass() != o.getClass())
+            return false;
         OriginDescription that = (OriginDescription) o;
         return Objects.equals(harvestDate, that.harvestDate)
             && Objects.equals(altered, that.altered)
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import eu.dnetlib.dhp.schema.common.ModelConstants;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -275,63 +276,48 @@ public class Project extends OafEntity implements Serializable {

         Project p = (Project) e;

-        websiteurl =
-            p.getWebsiteurl() != null && compareTrust(this, e) < 0 ? p.getWebsiteurl() : websiteurl;
+        websiteurl = p.getWebsiteurl() != null && compareTrust(this, e) < 0 ? p.getWebsiteurl() : websiteurl;
         code = p.getCode() != null && compareTrust(this, e) < 0 ? p.getCode() : code;
         acronym = p.getAcronym() != null && compareTrust(this, e) < 0 ? p.getAcronym() : acronym;
         title = p.getTitle() != null && compareTrust(this, e) < 0 ? p.getTitle() : title;
-        startdate =
-            p.getStartdate() != null && compareTrust(this, e) < 0 ? p.getStartdate() : startdate;
+        startdate = p.getStartdate() != null && compareTrust(this, e) < 0 ? p.getStartdate() : startdate;
         enddate = p.getEnddate() != null && compareTrust(this, e) < 0 ? p.getEnddate() : enddate;
-        callidentifier =
-            p.getCallidentifier() != null && compareTrust(this, e) < 0
-                ? p.getCallidentifier()
-                : callidentifier;
+        callidentifier = p.getCallidentifier() != null && compareTrust(this, e) < 0
+            ? p.getCallidentifier()
+            : callidentifier;
         keywords = p.getKeywords() != null && compareTrust(this, e) < 0 ? p.getKeywords() : keywords;
         duration = p.getDuration() != null && compareTrust(this, e) < 0 ? p.getDuration() : duration;
         ecsc39 = p.getEcsc39() != null && compareTrust(this, e) < 0 ? p.getEcsc39() : ecsc39;
-        oamandatepublications =
-            p.getOamandatepublications() != null && compareTrust(this, e) < 0
-                ? p.getOamandatepublications()
-                : oamandatepublications;
+        oamandatepublications = p.getOamandatepublications() != null && compareTrust(this, e) < 0
+            ? p.getOamandatepublications()
+            : oamandatepublications;
-        ecarticle29_3 =
-            p.getEcarticle29_3() != null && compareTrust(this, e) < 0
-                ? p.getEcarticle29_3()
-                : ecarticle29_3;
+        ecarticle29_3 = p.getEcarticle29_3() != null && compareTrust(this, e) < 0
+            ? p.getEcarticle29_3()
+            : ecarticle29_3;
         subjects = mergeLists(subjects, p.getSubjects());
         fundingtree = mergeLists(fundingtree, p.getFundingtree());
-        contracttype =
-            p.getContracttype() != null && compareTrust(this, e) < 0
-                ? p.getContracttype()
-                : contracttype;
+        contracttype = p.getContracttype() != null && compareTrust(this, e) < 0
+            ? p.getContracttype()
+            : contracttype;
-        optional1 =
-            p.getOptional1() != null && compareTrust(this, e) < 0 ? p.getOptional1() : optional1;
+        optional1 = p.getOptional1() != null && compareTrust(this, e) < 0 ? p.getOptional1() : optional1;
-        optional2 =
-            p.getOptional2() != null && compareTrust(this, e) < 0 ? p.getOptional2() : optional2;
+        optional2 = p.getOptional2() != null && compareTrust(this, e) < 0 ? p.getOptional2() : optional2;
-        jsonextrainfo =
-            p.getJsonextrainfo() != null && compareTrust(this, e) < 0
-                ? p.getJsonextrainfo()
-                : jsonextrainfo;
+        jsonextrainfo = p.getJsonextrainfo() != null && compareTrust(this, e) < 0
+            ? p.getJsonextrainfo()
+            : jsonextrainfo;
-        contactfullname =
-            p.getContactfullname() != null && compareTrust(this, e) < 0
-                ? p.getContactfullname()
-                : contactfullname;
+        contactfullname = p.getContactfullname() != null && compareTrust(this, e) < 0
+            ? p.getContactfullname()
+            : contactfullname;
-        contactfax =
-            p.getContactfax() != null && compareTrust(this, e) < 0 ? p.getContactfax() : contactfax;
+        contactfax = p.getContactfax() != null && compareTrust(this, e) < 0 ? p.getContactfax() : contactfax;
-        contactphone =
-            p.getContactphone() != null && compareTrust(this, e) < 0
-                ? p.getContactphone()
-                : contactphone;
+        contactphone = p.getContactphone() != null && compareTrust(this, e) < 0
+            ? p.getContactphone()
+            : contactphone;
-        contactemail =
-            p.getContactemail() != null && compareTrust(this, e) < 0
-                ? p.getContactemail()
-                : contactemail;
+        contactemail = p.getContactemail() != null && compareTrust(this, e) < 0
+            ? p.getContactemail()
+            : contactemail;
         summary = p.getSummary() != null && compareTrust(this, e) < 0 ? p.getSummary() : summary;
         currency = p.getCurrency() != null && compareTrust(this, e) < 0 ? p.getCurrency() : currency;
-        totalcost =
-            p.getTotalcost() != null && compareTrust(this, e) < 0 ? p.getTotalcost() : totalcost;
+        totalcost = p.getTotalcost() != null && compareTrust(this, e) < 0 ? p.getTotalcost() : totalcost;
-        fundedamount =
-            p.getFundedamount() != null && compareTrust(this, e) < 0
-                ? p.getFundedamount()
-                : fundedamount;
+        fundedamount = p.getFundedamount() != null && compareTrust(this, e) < 0
+            ? p.getFundedamount()
+            : fundedamount;
         mergeOAFDataInfo(e);
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import eu.dnetlib.dhp.schema.common.ModelConstants;

@@ -30,7 +31,8 @@ public class Publication extends Result implements Serializable {

         Publication p = (Publication) e;

-        if (p.getJournal() != null && compareTrust(this, e) < 0) journal = p.getJournal();
+        if (p.getJournal() != null && compareTrust(this, e) < 0)
+            journal = p.getJournal();
         mergeOAFDataInfo(e);
     }
 }
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import com.fasterxml.jackson.annotation.JsonIgnore;

@@ -46,7 +47,8 @@ public class Qualifier implements Serializable {
     public String toComparableString() {
         return isBlank()
             ? ""
-            : String.format(
+            : String
+                .format(
                 "%s::%s::%s::%s",
                 classid != null ? classid : "",
                 classname != null ? classname : "",

@@ -69,9 +71,12 @@ public class Qualifier implements Serializable {

     @Override
     public boolean equals(Object obj) {
-        if (this == obj) return true;
-        if (obj == null) return false;
-        if (getClass() != obj.getClass()) return false;
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;

         Qualifier other = (Qualifier) obj;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import static com.google.common.base.Preconditions.checkArgument;

@@ -68,11 +69,14 @@ public class Relation extends Oaf {
         checkArgument(Objects.equals(getRelClass(), r.getRelClass()), "relClass(es) must be equal");

         setCollectedfrom(
-            Stream.concat(
-                Optional.ofNullable(getCollectedfrom())
+            Stream
+                .concat(
+                    Optional
+                        .ofNullable(getCollectedfrom())
                         .map(Collection::stream)
                         .orElse(Stream.empty()),
-                Optional.ofNullable(r.getCollectedfrom())
+                    Optional
+                        .ofNullable(r.getCollectedfrom())
                         .map(Collection::stream)
                         .orElse(Stream.empty()))
                 .distinct() // relies on KeyValue.equals

@@ -81,8 +85,10 @@ public class Relation extends Oaf {

     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o)
+            return true;
+        if (o == null || getClass() != o.getClass())
+            return false;
         Relation relation = (Relation) o;
         return relType.equals(relation.relType)
             && subRelType.equals(relation.subRelType)
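Relation.mergeFrom above unions the two collectedfrom lists null-safely via Stream.concat. A self-contained sketch of that idiom; the terminal collect(...) lies outside the hunk and is assumed here:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class CollectedFromMergeSketch {

    // Null-safe union of two possibly-null lists, deduplicated via equals.
    static <T> List<T> union(List<T> mine, List<T> theirs) {
        return Stream
            .concat(
                Optional.ofNullable(mine).map(Collection::stream).orElse(Stream.empty()),
                Optional.ofNullable(theirs).map(Collection::stream).orElse(Stream.empty()))
            .distinct()
            .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        System.out.println(union(Arrays.asList("openaire", "datacite"), Arrays.asList("datacite")));
        // [openaire, datacite]
    }
}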
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -233,9 +234,11 @@ public class Result extends OafEntity implements Serializable {
         if (r.getBestaccessright() != null && compareTrust(this, r) < 0)
             bestaccessright = r.getBestaccessright();

-        if (r.getResulttype() != null && compareTrust(this, r) < 0) resulttype = r.getResulttype();
+        if (r.getResulttype() != null && compareTrust(this, r) < 0)
+            resulttype = r.getResulttype();

-        if (r.getLanguage() != null && compareTrust(this, r) < 0) language = r.getLanguage();
+        if (r.getLanguage() != null && compareTrust(this, r) < 0)
+            language = r.getLanguage();

         country = mergeLists(country, r.getCountry());

@@ -247,7 +250,8 @@ public class Result extends OafEntity implements Serializable {

         description = longestLists(description, r.getDescription());

-        if (r.getPublisher() != null && compareTrust(this, r) < 0) publisher = r.getPublisher();
+        if (r.getPublisher() != null && compareTrust(this, r) < 0)
+            publisher = r.getPublisher();

         if (r.getEmbargoenddate() != null && compareTrust(this, r) < 0)
             embargoenddate = r.getEmbargoenddate();

@@ -260,7 +264,8 @@ public class Result extends OafEntity implements Serializable {

         contributor = mergeLists(contributor, r.getContributor());

-        if (r.getResourcetype() != null) resourcetype = r.getResourcetype();
+        if (r.getResourcetype() != null)
+            resourcetype = r.getResourcetype();

         coverage = mergeLists(coverage, r.getCoverage());

@@ -270,16 +275,17 @@ public class Result extends OafEntity implements Serializable {
     }

     private List<Field<String>> longestLists(List<Field<String>> a, List<Field<String>> b) {
-        if (a == null || b == null) return a == null ? b : a;
+        if (a == null || b == null)
+            return a == null ? b : a;
         if (a.size() == b.size()) {
-            int msa =
-                a.stream()
+            int msa = a
+                .stream()
                 .filter(i -> i.getValue() != null)
                 .map(i -> i.getValue().length())
                 .max(Comparator.naturalOrder())
                 .orElse(0);
-            int msb =
-                b.stream()
+            int msb = b
+                .stream()
                 .filter(i -> i.getValue() != null)
                 .map(i -> i.getValue().length())
                 .max(Comparator.naturalOrder())
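longestLists prefers the non-null list and, on equal sizes, measures the longest entry on each side (msa/msb). The final msa/msb comparison falls outside the hunk, so the tie-break in this string-based sketch is an assumption:

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;

public class LongestListsSketch {

    // Mirrors Result.longestLists for plain strings.
    static List<String> longestLists(List<String> a, List<String> b) {
        if (a == null || b == null)
            return a == null ? b : a;
        if (a.size() == b.size()) {
            int msa = a.stream().filter(Objects::nonNull).map(String::length)
                .max(Comparator.naturalOrder()).orElse(0);
            int msb = b.stream().filter(Objects::nonNull).map(String::length)
                .max(Comparator.naturalOrder()).orElse(0);
            return msa >= msb ? a : b; // assumed tie-break: keep the side with the longer entry
        }
        return a.size() > b.size() ? a : b;
    }

    public static void main(String[] args) {
        System.out.println(longestLists(Arrays.asList("short"), Arrays.asList("a longer description")));
    }
}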
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import eu.dnetlib.dhp.schema.common.ModelConstants;

@@ -63,13 +64,11 @@ public class Software extends Result implements Serializable {

         license = mergeLists(license, s.getLicense());

-        codeRepositoryUrl =
-            s.getCodeRepositoryUrl() != null && compareTrust(this, s) < 0
-                ? s.getCodeRepositoryUrl()
-                : codeRepositoryUrl;
+        codeRepositoryUrl = s.getCodeRepositoryUrl() != null && compareTrust(this, s) < 0
+            ? s.getCodeRepositoryUrl()
+            : codeRepositoryUrl;

-        programmingLanguage =
-            s.getProgrammingLanguage() != null && compareTrust(this, s) < 0
-                ? s.getProgrammingLanguage()
-                : programmingLanguage;
+        programmingLanguage = s.getProgrammingLanguage() != null && compareTrust(this, s) < 0
+            ? s.getProgrammingLanguage()
+            : programmingLanguage;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;

@@ -45,9 +46,12 @@ public class StructuredProperty implements Serializable {

     @Override
     public boolean equals(Object obj) {
-        if (this == obj) return true;
-        if (obj == null) return false;
-        if (getClass() != obj.getClass()) return false;
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;

         StructuredProperty other = (StructuredProperty) obj;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.scholexplorer;

 import eu.dnetlib.dhp.schema.oaf.Dataset;

@@ -46,7 +47,8 @@ public class DLIDataset extends Dataset {
         DLIDataset p = (DLIDataset) e;
         if (StringUtils.isBlank(completionStatus) && StringUtils.isNotBlank(p.completionStatus))
             completionStatus = p.completionStatus;
-        if ("complete".equalsIgnoreCase(p.completionStatus)) completionStatus = "complete";
+        if ("complete".equalsIgnoreCase(p.completionStatus))
+            completionStatus = "complete";
         dlicollectedfrom = mergeProvenance(dlicollectedfrom, p.getDlicollectedfrom());
     }

@@ -54,7 +56,8 @@ public class DLIDataset extends Dataset {
         final List<ProvenaceInfo> a, final List<ProvenaceInfo> b) {
         Map<String, ProvenaceInfo> result = new HashMap<>();
         if (a != null)
-            a.forEach(
+            a
+                .forEach(
                 p -> {
                     if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
                         if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())

@@ -66,7 +69,8 @@ public class DLIDataset extends Dataset {
                     result.put(p.getId(), p);
                 });
         if (b != null)
-            b.forEach(
+            b
+                .forEach(
                 p -> {
                     if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
                         if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
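The mergeProvenance helper in the DLI classes indexes provenance records by id and lets a "complete" record supersede an "incomplete" one. The exact overwrite condition is truncated in the hunk, so the rule below is an assumption, and Prov is a stand-in for ProvenaceInfo:

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MergeProvenanceSketch {

    // Minimal stand-in for ProvenaceInfo: only the fields the merge needs.
    static class Prov {
        final String id;
        final String completionStatus;

        Prov(String id, String completionStatus) {
            this.id = id;
            this.completionStatus = completionStatus;
        }

        public String toString() {
            return id + "/" + completionStatus;
        }
    }

    static Collection<Prov> mergeProvenance(List<Prov> a, List<Prov> b) {
        Map<String, Prov> result = new HashMap<>();
        for (List<Prov> side : Arrays.asList(a, b))
            if (side != null)
                side.forEach(p -> {
                    if (p == null || p.id == null)
                        return;
                    Prov existing = result.get(p.id);
                    // assumed rule: first writer wins unless a "complete"
                    // record can replace an "incomplete" one
                    if (existing == null
                        || ("incomplete".equalsIgnoreCase(existing.completionStatus)
                            && "complete".equalsIgnoreCase(p.completionStatus)))
                        result.put(p.id, p);
                });
        return result.values();
    }

    public static void main(String[] args) {
        System.out.println(
            mergeProvenance(
                Arrays.asList(new Prov("dct", "incomplete")),
                Arrays.asList(new Prov("dct", "complete")))); // [dct/complete]
    }
}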
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.scholexplorer;

 import eu.dnetlib.dhp.schema.oaf.OafEntity;

@@ -44,7 +45,8 @@ public class DLIPublication extends Publication implements Serializable {
         DLIPublication p = (DLIPublication) e;
         if (StringUtils.isBlank(completionStatus) && StringUtils.isNotBlank(p.completionStatus))
             completionStatus = p.completionStatus;
-        if ("complete".equalsIgnoreCase(p.completionStatus)) completionStatus = "complete";
+        if ("complete".equalsIgnoreCase(p.completionStatus))
+            completionStatus = "complete";
         dlicollectedfrom = mergeProvenance(dlicollectedfrom, p.getDlicollectedfrom());
     }

@@ -52,7 +54,8 @@ public class DLIPublication extends Publication implements Serializable {
         final List<ProvenaceInfo> a, final List<ProvenaceInfo> b) {
         Map<String, ProvenaceInfo> result = new HashMap<>();
         if (a != null)
-            a.forEach(
+            a
+                .forEach(
                 p -> {
                     if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
                         if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())

@@ -64,7 +67,8 @@ public class DLIPublication extends Publication implements Serializable {
                     result.put(p.getId(), p);
                 });
         if (b != null)
-            b.forEach(
+            b
+                .forEach(
                 p -> {
                     if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
                         if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.scholexplorer;

 import eu.dnetlib.dhp.schema.oaf.Relation;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.scholexplorer;

 import eu.dnetlib.dhp.schema.oaf.Oaf;

@@ -72,7 +73,8 @@ public class DLIUnknown extends Oaf implements Serializable {
     }

     public void mergeFrom(DLIUnknown p) {
-        if ("complete".equalsIgnoreCase(p.completionStatus)) completionStatus = "complete";
+        if ("complete".equalsIgnoreCase(p.completionStatus))
+            completionStatus = "complete";
         dlicollectedfrom = mergeProvenance(dlicollectedfrom, p.getDlicollectedfrom());
     }

@@ -80,7 +82,8 @@ public class DLIUnknown extends Oaf implements Serializable {
         final List<ProvenaceInfo> a, final List<ProvenaceInfo> b) {
         Map<String, ProvenaceInfo> result = new HashMap<>();
         if (a != null)
-            a.forEach(
+            a
+                .forEach(
                 p -> {
                     if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
                         if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())

@@ -92,7 +95,8 @@ public class DLIUnknown extends Oaf implements Serializable {
                     result.put(p.getId(), p);
                 });
         if (b != null)
-            b.forEach(
+            b
+                .forEach(
                 p -> {
                     if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
                         if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.scholexplorer;

 import java.io.Serializable;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.action;

 import static org.junit.jupiter.api.Assertions.*;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.common;

 import static org.junit.jupiter.api.Assertions.assertFalse;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.oaf;

 import static org.junit.jupiter.api.Assertions.*;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.schema.scholexplorer;

 import com.fasterxml.jackson.core.JsonProcessingException;

@@ -22,13 +23,17 @@ public class DLItest {
         a1.setCompletionStatus("complete");

         DLIPublication a = new DLIPublication();
-        a.setPid(
-            Arrays.asList(
+        a
+            .setPid(
+                Arrays
+                    .asList(
                 createSP("10.11", "doi", "dnet:pid_types"),
                 createSP("123456", "pdb", "dnet:pid_types")));
         a.setTitle(Collections.singletonList(createSP("A Title", "title", "dnetTitle")));
-        a.setDlicollectedfrom(
-            Arrays.asList(
+        a
+            .setDlicollectedfrom(
+                Arrays
+                    .asList(
                 createCollectedFrom("dct", "datacite", "complete"),
                 createCollectedFrom("dct", "datacite", "incomplete")));
         a.setCompletionStatus("incomplete");

@@ -42,8 +47,7 @@ public class DLItest {
     @Test
     public void testDeserialization() throws IOException {

-        final String json =
-            "{\"dataInfo\":{\"invisible\":false,\"inferred\":null,\"deletedbyinference\":false,\"trust\":\"0.9\",\"inferenceprovenance\":null,\"provenanceaction\":null},\"lastupdatetimestamp\":null,\"id\":\"60|bd9352547098929a394655ad1a44a479\",\"originalId\":[\"bd9352547098929a394655ad1a44a479\"],\"collectedfrom\":[{\"key\":\"dli_________::datacite\",\"value\":\"Datasets in Datacite\",\"dataInfo\":null,\"blank\":false}],\"pid\":[{\"value\":\"10.7925/DRS1.DUCHAS_5078760\",\"qualifier\":{\"classid\":\"doi\",\"classname\":\"doi\",\"schemeid\":\"dnet:pid_types\",\"schemename\":\"dnet:pid_types\",\"blank\":false},\"dataInfo\":null}],\"dateofcollection\":\"2020-01-09T08:29:31.885Z\",\"dateoftransformation\":null,\"extraInfo\":null,\"oaiprovenance\":null,\"author\":[{\"fullname\":\"Cathail, S. Ó\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"Donnell, Breda Mc\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"Ireland. Department of Arts, Culture, and the Gaeltacht\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"University College Dublin\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"National Folklore Foundation\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"Cathail, S. Ó\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"Donnell, Breda Mc\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null}],\"resulttype\":null,\"language\":null,\"country\":null,\"subject\":[{\"value\":\"Recreation\",\"qualifier\":{\"classid\":\"dnet:subject\",\"classname\":\"dnet:subject\",\"schemeid\":\"unknown\",\"schemename\":\"unknown\",\"blank\":false},\"dataInfo\":null},{\"value\":\"Entertainments and recreational activities\",\"qualifier\":{\"classid\":\"dnet:subject\",\"classname\":\"dnet:subject\",\"schemeid\":\"unknown\",\"schemename\":\"unknown\",\"blank\":false},\"dataInfo\":null},{\"value\":\"Siamsaíocht agus caitheamh aimsire\",\"qualifier\":{\"classid\":\"dnet:subject\",\"classname\":\"dnet:subject\",\"schemeid\":\"unknown\",\"schemename\":\"unknown\",\"blank\":false},\"dataInfo\":null}],\"title\":[{\"value\":\"Games We Play\",\"qualifier\":null,\"dataInfo\":null}],\"relevantdate\":[{\"value\":\"1938-09-28\",\"qualifier\":{\"classid\":\"date\",\"classname\":\"date\",\"schemeid\":\"dnet::date\",\"schemename\":\"dnet::date\",\"blank\":false},\"dataInfo\":null}],\"description\":[{\"value\":\"Story collected by Breda Mc Donnell, a student at Tenure school (Tinure, Co. Louth) (no informant identified).\",\"dataInfo\":null}],\"dateofacceptance\":null,\"publisher\":{\"value\":\"University College Dublin\",\"dataInfo\":null},\"embargoenddate\":null,\"source\":null,\"fulltext\":null,\"format\":null,\"contributor\":null,\"resourcetype\":null,\"coverage\":null,\"refereed\":null,\"context\":null,\"processingchargeamount\":null,\"processingchargecurrency\":null,\"externalReference\":null,\"instance\":[],\"storagedate\":null,\"device\":null,\"size\":null,\"version\":null,\"lastmetadataupdate\":null,\"metadataversionnumber\":null,\"geolocation\":null,\"dlicollectedfrom\":[{\"id\":\"dli_________::datacite\",\"name\":\"Datasets in Datacite\",\"completionStatus\":\"complete\",\"collectionMode\":\"resolved\"}],\"completionStatus\":\"complete\"}";
+        final String json = "{\"dataInfo\":{\"invisible\":false,\"inferred\":null,\"deletedbyinference\":false,\"trust\":\"0.9\",\"inferenceprovenance\":null,\"provenanceaction\":null},\"lastupdatetimestamp\":null,\"id\":\"60|bd9352547098929a394655ad1a44a479\",\"originalId\":[\"bd9352547098929a394655ad1a44a479\"],\"collectedfrom\":[{\"key\":\"dli_________::datacite\",\"value\":\"Datasets in Datacite\",\"dataInfo\":null,\"blank\":false}],\"pid\":[{\"value\":\"10.7925/DRS1.DUCHAS_5078760\",\"qualifier\":{\"classid\":\"doi\",\"classname\":\"doi\",\"schemeid\":\"dnet:pid_types\",\"schemename\":\"dnet:pid_types\",\"blank\":false},\"dataInfo\":null}],\"dateofcollection\":\"2020-01-09T08:29:31.885Z\",\"dateoftransformation\":null,\"extraInfo\":null,\"oaiprovenance\":null,\"author\":[{\"fullname\":\"Cathail, S. Ó\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"Donnell, Breda Mc\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"Ireland. Department of Arts, Culture, and the Gaeltacht\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"University College Dublin\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"National Folklore Foundation\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"Cathail, S. Ó\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null},{\"fullname\":\"Donnell, Breda Mc\",\"name\":null,\"surname\":null,\"rank\":null,\"pid\":null,\"affiliation\":null}],\"resulttype\":null,\"language\":null,\"country\":null,\"subject\":[{\"value\":\"Recreation\",\"qualifier\":{\"classid\":\"dnet:subject\",\"classname\":\"dnet:subject\",\"schemeid\":\"unknown\",\"schemename\":\"unknown\",\"blank\":false},\"dataInfo\":null},{\"value\":\"Entertainments and recreational activities\",\"qualifier\":{\"classid\":\"dnet:subject\",\"classname\":\"dnet:subject\",\"schemeid\":\"unknown\",\"schemename\":\"unknown\",\"blank\":false},\"dataInfo\":null},{\"value\":\"Siamsaíocht agus caitheamh aimsire\",\"qualifier\":{\"classid\":\"dnet:subject\",\"classname\":\"dnet:subject\",\"schemeid\":\"unknown\",\"schemename\":\"unknown\",\"blank\":false},\"dataInfo\":null}],\"title\":[{\"value\":\"Games We Play\",\"qualifier\":null,\"dataInfo\":null}],\"relevantdate\":[{\"value\":\"1938-09-28\",\"qualifier\":{\"classid\":\"date\",\"classname\":\"date\",\"schemeid\":\"dnet::date\",\"schemename\":\"dnet::date\",\"blank\":false},\"dataInfo\":null}],\"description\":[{\"value\":\"Story collected by Breda Mc Donnell, a student at Tenure school (Tinure, Co. Louth) (no informant identified).\",\"dataInfo\":null}],\"dateofacceptance\":null,\"publisher\":{\"value\":\"University College Dublin\",\"dataInfo\":null},\"embargoenddate\":null,\"source\":null,\"fulltext\":null,\"format\":null,\"contributor\":null,\"resourcetype\":null,\"coverage\":null,\"refereed\":null,\"context\":null,\"processingchargeamount\":null,\"processingchargecurrency\":null,\"externalReference\":null,\"instance\":[],\"storagedate\":null,\"device\":null,\"size\":null,\"version\":null,\"lastmetadataupdate\":null,\"metadataversionnumber\":null,\"geolocation\":null,\"dlicollectedfrom\":[{\"id\":\"dli_________::datacite\",\"name\":\"Datasets in Datacite\",\"completionStatus\":\"complete\",\"collectionMode\":\"resolved\"}],\"completionStatus\":\"complete\"}";

         ObjectMapper mapper = new ObjectMapper();
         mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
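testDeserialization relies on one ObjectMapper switch: unknown JSON properties are ignored rather than failing the parse, which keeps the test tolerant of schema evolution. Minimal usage:

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class LenientMapperSketch {
    public static void main(String[] args) {
        // Extra fields in the input JSON are silently skipped instead of
        // raising UnrecognizedPropertyException.
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        // mapper.readValue(json, DLIDataset.class) would now tolerate unknown fields.
    }
}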
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager;

 import com.google.common.base.Splitter;

@@ -24,8 +25,7 @@ import org.slf4j.LoggerFactory;

 public class ISClient implements Serializable {

-    private static final Logger log =
-        LoggerFactory.getLogger(PartitionActionSetsByPayloadTypeJob.class);
+    private static final Logger log = LoggerFactory.getLogger(PartitionActionSetsByPayloadTypeJob.class);

     private static final String INPUT_ACTION_SET_ID_SEPARATOR = ",";

@@ -37,14 +37,16 @@ public class ISClient implements Serializable {

     public List<String> getLatestRawsetPaths(String setIds) {

-        List<String> ids =
-            Lists.newArrayList(
-                Splitter.on(INPUT_ACTION_SET_ID_SEPARATOR)
+        List<String> ids = Lists
+            .newArrayList(
+                Splitter
+                    .on(INPUT_ACTION_SET_ID_SEPARATOR)
                     .omitEmptyStrings()
                     .trimResults()
                     .split(setIds));

-        return ids.stream()
+        return ids
+            .stream()
             .map(id -> getSet(isLookup, id))
             .map(as -> as.getPathToLatest())
             .collect(Collectors.toCollection(ArrayList::new));

@@ -52,8 +54,7 @@ public class ISClient implements Serializable {

     private ActionManagerSet getSet(ISLookUpService isLookup, final String setId) {

-        final String q =
-            "for $x in collection('/db/DRIVER/ActionManagerSetDSResources/ActionManagerSetDSResourceType') "
+        final String q = "for $x in collection('/db/DRIVER/ActionManagerSetDSResources/ActionManagerSetDSResourceType') "
             + "where $x//SET/@id = '"
             + setId
             + "' return $x";

@@ -78,7 +79,8 @@ public class ISClient implements Serializable {
         set.setId(doc.valueOf("//SET/@id").trim());
         set.setName(doc.valueOf("//SET").trim());
         set.setImpact(ImpactTypes.valueOf(doc.valueOf("//IMPACT").trim()));
-        set.setLatest(
+        set
+            .setLatest(
             doc.valueOf("//RAW_SETS/LATEST/@id"),
             doc.valueOf("//RAW_SETS/LATEST/@creationDate"),
             doc.valueOf("//RAW_SETS/LATEST/@lastUpdate"));

@@ -87,7 +89,8 @@ public class ISClient implements Serializable {
         if (expiredNodes != null) {
             for (int i = 0; i < expiredNodes.size(); i++) {
                 Element ex = (Element) expiredNodes.get(i);
-                set.addExpired(
+                set
+                    .addExpired(
                     ex.attributeValue("id"),
                     ex.attributeValue("creationDate"),
                     ex.attributeValue("lastUpdate"));

@@ -114,8 +117,7 @@ public class ISClient implements Serializable {

     private String queryServiceProperty(ISLookUpService isLookup, final String propertyName)
         throws ActionManagerException {
-        final String q =
-            "for $x in /RESOURCE_PROFILE[.//RESOURCE_TYPE/@value='ActionManagerServiceResourceType'] return $x//SERVICE_PROPERTIES/PROPERTY[./@ key='"
+        final String q = "for $x in /RESOURCE_PROFILE[.//RESOURCE_TYPE/@value='ActionManagerServiceResourceType'] return $x//SERVICE_PROPERTIES/PROPERTY[./@ key='"
             + propertyName
             + "']/@value/string()";
         log.debug("quering for service property: " + q);
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.migration;
 
 import eu.dnetlib.data.proto.FieldTypeProtos.Qualifier;
@@ -8,38 +9,58 @@ public class LicenseComparator implements Comparator<Qualifier> {
     @Override
     public int compare(Qualifier left, Qualifier right) {
 
-        if (left == null && right == null) return 0;
-        if (left == null) return 1;
-        if (right == null) return -1;
+        if (left == null && right == null)
+            return 0;
+        if (left == null)
+            return 1;
+        if (right == null)
+            return -1;
 
         String lClass = left.getClassid();
         String rClass = right.getClassid();
 
-        if (lClass.equals(rClass)) return 0;
+        if (lClass.equals(rClass))
+            return 0;
 
-        if (lClass.equals("OPEN SOURCE")) return -1;
-        if (rClass.equals("OPEN SOURCE")) return 1;
+        if (lClass.equals("OPEN SOURCE"))
+            return -1;
+        if (rClass.equals("OPEN SOURCE"))
+            return 1;
 
-        if (lClass.equals("OPEN")) return -1;
-        if (rClass.equals("OPEN")) return 1;
+        if (lClass.equals("OPEN"))
+            return -1;
+        if (rClass.equals("OPEN"))
+            return 1;
 
-        if (lClass.equals("6MONTHS")) return -1;
-        if (rClass.equals("6MONTHS")) return 1;
+        if (lClass.equals("6MONTHS"))
+            return -1;
+        if (rClass.equals("6MONTHS"))
+            return 1;
 
-        if (lClass.equals("12MONTHS")) return -1;
-        if (rClass.equals("12MONTHS")) return 1;
+        if (lClass.equals("12MONTHS"))
+            return -1;
+        if (rClass.equals("12MONTHS"))
+            return 1;
 
-        if (lClass.equals("EMBARGO")) return -1;
-        if (rClass.equals("EMBARGO")) return 1;
+        if (lClass.equals("EMBARGO"))
+            return -1;
+        if (rClass.equals("EMBARGO"))
+            return 1;
 
-        if (lClass.equals("RESTRICTED")) return -1;
-        if (rClass.equals("RESTRICTED")) return 1;
+        if (lClass.equals("RESTRICTED"))
+            return -1;
+        if (rClass.equals("RESTRICTED"))
+            return 1;
 
-        if (lClass.equals("CLOSED")) return -1;
-        if (rClass.equals("CLOSED")) return 1;
+        if (lClass.equals("CLOSED"))
+            return -1;
+        if (rClass.equals("CLOSED"))
+            return 1;
 
-        if (lClass.equals("UNKNOWN")) return -1;
-        if (rClass.equals("UNKNOWN")) return 1;
+        if (lClass.equals("UNKNOWN"))
+            return -1;
+        if (rClass.equals("UNKNOWN"))
+            return 1;
 
         // Else (but unlikely), lexicographical ordering will do.
         return lClass.compareTo(rClass);
 
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.migration;
 
 import com.google.common.base.Splitter;
@@ -34,10 +35,11 @@ public class MigrateActionSet {
     private static final String RAWSET_PREFIX = "rawset_";
 
     public static void main(String[] args) throws Exception {
-        final ArgumentApplicationParser parser =
-                new ArgumentApplicationParser(
-                        IOUtils.toString(
-                                MigrateActionSet.class.getResourceAsStream(
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+            IOUtils
+                .toString(
+                    MigrateActionSet.class
+                        .getResourceAsStream(
                             "/eu/dnetlib/dhp/actionmanager/migration/migrate_actionsets_parameters.json")));
         parser.parseArgument(args);
 
@@ -68,8 +70,7 @@ public class MigrateActionSet {
         Configuration conf = getConfiguration(distcp_task_timeout, distcp_memory_mb, distcp_num_maps);
         FileSystem targetFS = FileSystem.get(conf);
 
-        Configuration sourceConf =
-                getConfiguration(distcp_task_timeout, distcp_memory_mb, distcp_num_maps);
+        Configuration sourceConf = getConfiguration(distcp_task_timeout, distcp_memory_mb, distcp_num_maps);
         sourceConf.set(FileSystem.FS_DEFAULT_NAME_KEY, sourceNN);
         FileSystem sourceFS = FileSystem.get(sourceConf);
 
@@ -78,7 +79,8 @@ public class MigrateActionSet {
         List<Path> targetPaths = new ArrayList<>();
 
         final List<Path> sourcePaths = getSourcePaths(sourceNN, isLookUp);
-        log.info(
+        log
+            .info(
                 "paths to process:\n{}",
                 sourcePaths.stream().map(p -> p.toString()).collect(Collectors.joining("\n")));
         for (Path source : sourcePaths) {
@@ -87,8 +89,7 @@ public class MigrateActionSet {
                 log.warn("skipping unexisting path: {}", source);
             } else {
 
-                LinkedList<String> pathQ =
-                        Lists.newLinkedList(Splitter.on(SEPARATOR).split(source.toUri().getPath()));
+                LinkedList<String> pathQ = Lists.newLinkedList(Splitter.on(SEPARATOR).split(source.toUri().getPath()));
 
                 final String rawSet = pathQ.pollLast();
                 log.info("got RAWSET: {}", rawSet);
@@ -97,8 +98,8 @@ public class MigrateActionSet {
 
                 final String actionSetDirectory = pathQ.pollLast();
 
-                final Path targetPath =
-                        new Path(targetNN + workDir + SEPARATOR + actionSetDirectory + SEPARATOR + rawSet);
+                final Path targetPath = new Path(
+                    targetNN + workDir + SEPARATOR + actionSetDirectory + SEPARATOR + rawSet);
 
                 log.info("using TARGET PATH: {}", targetPath);
 
@@ -115,7 +116,8 @@ public class MigrateActionSet {
             }
         }
 
-        props.setProperty(
+        props
+            .setProperty(
                 TARGET_PATHS, targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(",")));
         File file = new File(System.getProperty("oozie.action.output.properties"));
 
@@ -140,8 +142,8 @@ public class MigrateActionSet {
         op.preserve(DistCpOptions.FileAttribute.REPLICATION);
         op.preserve(DistCpOptions.FileAttribute.CHECKSUMTYPE);
 
-        int res =
-                ToolRunner.run(
+        int res = ToolRunner
+            .run(
                 new DistCp(conf, op),
                 new String[] {
                     "-Dmapred.task.timeout=" + distcp_task_timeout,
@@ -171,8 +173,7 @@ public class MigrateActionSet {
 
     private List<Path> getSourcePaths(String sourceNN, ISLookUpService isLookUp)
         throws ISLookUpException {
-        String XQUERY =
-                "distinct-values(\n"
+        String XQUERY = "distinct-values(\n"
             + "let $basePath := collection('/db/DRIVER/ServiceResources/ActionManagerServiceResourceType')//SERVICE_PROPERTIES/PROPERTY[@key = 'basePath']/@value/string()\n"
             + "for $x in collection('/db/DRIVER/ActionManagerSetDSResources/ActionManagerSetDSResourceType') \n"
             + "let $setDir := $x//SET/@directory/string()\n"
@@ -180,7 +181,9 @@ public class MigrateActionSet {
             + "return concat($basePath, '/', $setDir, '/', $rawSet))";
 
         log.info(String.format("running xquery:\n%s", XQUERY));
-        return isLookUp.quickSearchProfile(XQUERY).stream()
+        return isLookUp
+            .quickSearchProfile(XQUERY)
+            .stream()
             .map(p -> sourceNN + p)
             .map(Path::new)
             .collect(Collectors.toList());
 
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.migration;
 
 import static eu.dnetlib.data.proto.KindProtos.Kind.entity;
@@ -46,7 +47,8 @@ public class ProtoConverter implements Serializable {
         rel.setRelType(r.getRelType().toString());
         rel.setSubRelType(r.getSubRelType().toString());
         rel.setRelClass(r.getRelClass());
-        rel.setCollectedfrom(
+        rel
+            .setCollectedfrom(
                 r.getCollectedfromCount() > 0
                     ? r.getCollectedfromList().stream().map(kv -> mapKV(kv)).collect(Collectors.toList())
                     : null);
@@ -97,14 +99,16 @@ public class ProtoConverter implements Serializable {
     }
 
     private static Organization convertOrganization(OafProtos.Oaf oaf) {
-        final OrganizationProtos.Organization.Metadata m =
-                oaf.getEntity().getOrganization().getMetadata();
+        final OrganizationProtos.Organization.Metadata m = oaf.getEntity().getOrganization().getMetadata();
         final Organization org = setOaf(new Organization(), oaf);
         setEntity(org, oaf);
         org.setLegalshortname(mapStringField(m.getLegalshortname()));
         org.setLegalname(mapStringField(m.getLegalname()));
-        org.setAlternativeNames(
-                m.getAlternativeNamesList().stream()
+        org
+            .setAlternativeNames(
+                m
                    .getAlternativeNamesList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
         org.setWebsiteurl(mapStringField(m.getWebsiteurl()));
@@ -114,7 +118,8 @@ public class ProtoConverter implements Serializable {
         org.setEcnonprofit(mapStringField(m.getEcnonprofit()));
         org.setEcresearchorganization(mapStringField(m.getEcresearchorganization()));
         org.setEchighereducation(mapStringField(m.getEchighereducation()));
-        org.setEcinternationalorganizationeurinterests(
+        org
+            .setEcinternationalorganizationeurinterests(
                 mapStringField(m.getEcinternationalorganizationeurinterests()));
         org.setEcinternationalorganization(mapStringField(m.getEcinternationalorganization()));
         org.setEcenterprise(mapStringField(m.getEcenterprise()));
@@ -129,8 +134,11 @@ public class ProtoConverter implements Serializable {
         final DatasourceProtos.Datasource.Metadata m = oaf.getEntity().getDatasource().getMetadata();
         final Datasource datasource = setOaf(new Datasource(), oaf);
         setEntity(datasource, oaf);
-        datasource.setAccessinfopackage(
-                m.getAccessinfopackageList().stream()
+        datasource
+            .setAccessinfopackage(
+                m
                    .getAccessinfopackageList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
         datasource.setCertificates(mapStringField(m.getCertificates()));
@@ -151,12 +159,18 @@ public class ProtoConverter implements Serializable {
         datasource.setLogourl(mapStringField(m.getLogourl()));
         datasource.setMissionstatementurl(mapStringField(m.getMissionstatementurl()));
         datasource.setNamespaceprefix(mapStringField(m.getNamespaceprefix()));
-        datasource.setOdcontenttypes(
-                m.getOdcontenttypesList().stream()
+        datasource
+            .setOdcontenttypes(
+                m
                    .getOdcontenttypesList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
-        datasource.setOdlanguages(
-                m.getOdlanguagesList().stream()
+        datasource
+            .setOdlanguages(
+                m
                    .getOdlanguagesList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
         datasource.setOdnumberofitems(mapStringField(m.getOdnumberofitems()));
@@ -165,14 +179,18 @@ public class ProtoConverter implements Serializable {
         datasource.setOfficialname(mapStringField(m.getOfficialname()));
         datasource.setOpenairecompatibility(mapQualifier(m.getOpenairecompatibility()));
         datasource.setPidsystems(mapStringField(m.getPidsystems()));
-        datasource.setPolicies(
+        datasource
+            .setPolicies(
                 m.getPoliciesList().stream().map(ProtoConverter::mapKV).collect(Collectors.toList()));
         datasource.setQualitymanagementkind(mapStringField(m.getQualitymanagementkind()));
         datasource.setReleaseenddate(mapStringField(m.getReleaseenddate()));
         datasource.setServiceprovider(mapBoolField(m.getServiceprovider()));
         datasource.setReleasestartdate(mapStringField(m.getReleasestartdate()));
-        datasource.setSubjects(
-                m.getSubjectsList().stream()
+        datasource
+            .setSubjects(
+                m
                    .getSubjectsList()
                    .stream()
                    .map(ProtoConverter::mapStructuredProperty)
                    .collect(Collectors.toList()));
         datasource.setVersioning(mapBoolField(m.getVersioning()));
@@ -204,13 +222,17 @@ public class ProtoConverter implements Serializable {
         project.setFundedamount(m.getFundedamount());
         project.setTotalcost(m.getTotalcost());
         project.setKeywords(mapStringField(m.getKeywords()));
-        project.setSubjects(
-                m.getSubjectsList().stream()
+        project
+            .setSubjects(
+                m
                    .getSubjectsList()
                    .stream()
                    .map(sp -> mapStructuredProperty(sp))
                    .collect(Collectors.toList()));
         project.setTitle(mapStringField(m.getTitle()));
         project.setWebsiteurl(mapStringField(m.getWebsiteurl()));
-        project.setFundingtree(
+        project
+            .setFundingtree(
                 m.getFundingtreeList().stream().map(f -> mapStringField(f)).collect(Collectors.toList()));
         project.setJsonextrainfo(mapStringField(m.getJsonextrainfo()));
         project.setSummary(mapStringField(m.getSummary()));
@@ -242,12 +264,18 @@ public class ProtoConverter implements Serializable {
         setEntity(software, oaf);
         setResult(software, oaf);
 
-        software.setDocumentationUrl(
-                m.getDocumentationUrlList().stream()
+        software
+            .setDocumentationUrl(
+                m
                    .getDocumentationUrlList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
-        software.setLicense(
-                m.getLicenseList().stream()
+        software
+            .setLicense(
+                m
                    .getLicenseList()
                    .stream()
                    .map(ProtoConverter::mapStructuredProperty)
                    .collect(Collectors.toList()));
         software.setCodeRepositoryUrl(mapStringField(m.getCodeRepositoryUrl()));
@@ -260,15 +288,22 @@ public class ProtoConverter implements Serializable {
         OtherResearchProduct otherResearchProducts = setOaf(new OtherResearchProduct(), oaf);
         setEntity(otherResearchProducts, oaf);
         setResult(otherResearchProducts, oaf);
-        otherResearchProducts.setContactperson(
-                m.getContactpersonList().stream()
+        otherResearchProducts
+            .setContactperson(
+                m
                    .getContactpersonList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
-        otherResearchProducts.setContactgroup(
-                m.getContactgroupList().stream()
+        otherResearchProducts
+            .setContactgroup(
+                m
                    .getContactgroupList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
-        otherResearchProducts.setTool(
+        otherResearchProducts
+            .setTool(
                 m.getToolList().stream().map(ProtoConverter::mapStringField).collect(Collectors.toList()));
 
         return otherResearchProducts;
@@ -296,8 +331,11 @@ public class ProtoConverter implements Serializable {
         dataset.setVersion(mapStringField(m.getVersion()));
         dataset.setLastmetadataupdate(mapStringField(m.getLastmetadataupdate()));
         dataset.setMetadataversionnumber(mapStringField(m.getMetadataversionnumber()));
-        dataset.setGeolocation(
-                m.getGeolocationList().stream()
+        dataset
+            .setGeolocation(
+                m
                    .getGeolocationList()
                    .stream()
                    .map(ProtoConverter::mapGeolocation)
                    .collect(Collectors.toList()));
         return dataset;
@@ -314,16 +352,23 @@ public class ProtoConverter implements Serializable {
         final OafProtos.OafEntity e = oaf.getEntity();
         entity.setId(e.getId());
         entity.setOriginalId(e.getOriginalIdList());
-        entity.setCollectedfrom(
+        entity
+            .setCollectedfrom(
                 e.getCollectedfromList().stream().map(ProtoConverter::mapKV).collect(Collectors.toList()));
-        entity.setPid(
-                e.getPidList().stream()
+        entity
+            .setPid(
+                e
                    .getPidList()
                    .stream()
                    .map(ProtoConverter::mapStructuredProperty)
                    .collect(Collectors.toList()));
         entity.setDateofcollection(e.getDateofcollection());
         entity.setDateoftransformation(e.getDateoftransformation());
-        entity.setExtraInfo(
-                e.getExtraInfoList().stream()
+        entity
+            .setExtraInfo(
+                e
                    .getExtraInfoList()
                    .stream()
                    .map(ProtoConverter::mapExtraInfo)
                    .collect(Collectors.toList()));
         return entity;
@@ -332,55 +377,87 @@ public class ProtoConverter implements Serializable {
     public static <T extends Result> T setResult(T entity, OafProtos.Oaf oaf) {
         // setting Entity fields
         final ResultProtos.Result.Metadata m = oaf.getEntity().getResult().getMetadata();
-        entity.setAuthor(
+        entity
+            .setAuthor(
                 m.getAuthorList().stream().map(ProtoConverter::mapAuthor).collect(Collectors.toList()));
         entity.setResulttype(mapQualifier(m.getResulttype()));
         entity.setLanguage(mapQualifier(m.getLanguage()));
-        entity.setCountry(
-                m.getCountryList().stream()
+        entity
+            .setCountry(
+                m
                    .getCountryList()
                    .stream()
                    .map(ProtoConverter::mapQualifierAsCountry)
                    .collect(Collectors.toList()));
-        entity.setSubject(
-                m.getSubjectList().stream()
+        entity
+            .setSubject(
+                m
                    .getSubjectList()
                    .stream()
                    .map(ProtoConverter::mapStructuredProperty)
                    .collect(Collectors.toList()));
-        entity.setTitle(
-                m.getTitleList().stream()
+        entity
+            .setTitle(
+                m
                    .getTitleList()
                    .stream()
                    .map(ProtoConverter::mapStructuredProperty)
                    .collect(Collectors.toList()));
-        entity.setRelevantdate(
-                m.getRelevantdateList().stream()
+        entity
+            .setRelevantdate(
+                m
                    .getRelevantdateList()
                    .stream()
                    .map(ProtoConverter::mapStructuredProperty)
                    .collect(Collectors.toList()));
-        entity.setDescription(
-                m.getDescriptionList().stream()
+        entity
+            .setDescription(
+                m
                    .getDescriptionList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
         entity.setDateofacceptance(mapStringField(m.getDateofacceptance()));
         entity.setPublisher(mapStringField(m.getPublisher()));
         entity.setEmbargoenddate(mapStringField(m.getEmbargoenddate()));
-        entity.setSource(
-                m.getSourceList().stream()
+        entity
+            .setSource(
+                m
                    .getSourceList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
-        entity.setFulltext(
-                m.getFulltextList().stream()
+        entity
+            .setFulltext(
+                m
                    .getFulltextList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
-        entity.setFormat(
-                m.getFormatList().stream()
+        entity
+            .setFormat(
+                m
                    .getFormatList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
-        entity.setContributor(
-                m.getContributorList().stream()
+        entity
+            .setContributor(
+                m
                    .getContributorList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
         entity.setResourcetype(mapQualifier(m.getResourcetype()));
-        entity.setCoverage(
-                m.getCoverageList().stream()
+        entity
+            .setCoverage(
+                m
                    .getCoverageList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
-        entity.setContext(
+        entity
+            .setContext(
                 m.getContextList().stream().map(ProtoConverter::mapContext).collect(Collectors.toList()));
 
         entity.setBestaccessright(getBestAccessRights(oaf.getEntity().getResult().getInstanceList()));
@@ -390,8 +467,10 @@ public class ProtoConverter implements Serializable {
 
     private static Qualifier getBestAccessRights(List<ResultProtos.Result.Instance> instanceList) {
         if (instanceList != null) {
-            final Optional<FieldTypeProtos.Qualifier> min =
-                    instanceList.stream().map(i -> i.getAccessright()).min(new LicenseComparator());
+            final Optional<FieldTypeProtos.Qualifier> min = instanceList
+                .stream()
+                .map(i -> i.getAccessright())
+                .min(new LicenseComparator());
 
             final Qualifier rights = min.isPresent() ? mapQualifier(min.get()) : new Qualifier();
 
@@ -418,8 +497,11 @@ public class ProtoConverter implements Serializable {
 
         final Context entity = new Context();
         entity.setId(context.getId());
-        entity.setDataInfo(
-                context.getDataInfoList().stream()
+        entity
+            .setDataInfo(
+                context
                    .getDataInfoList()
                    .stream()
                    .map(ProtoConverter::mapDataInfo)
                    .collect(Collectors.toList()));
         return entity;
@@ -543,8 +625,11 @@ public class ProtoConverter implements Serializable {
         entity.setName(author.getName());
         entity.setSurname(author.getSurname());
         entity.setRank(author.getRank());
-        entity.setPid(
-                author.getPidList().stream()
+        entity
+            .setPid(
+                author
                    .getPidList()
                    .stream()
                    .map(
                        kv -> {
                            final StructuredProperty sp = new StructuredProperty();
@@ -556,8 +641,11 @@ public class ProtoConverter implements Serializable {
                            return sp;
                        })
                    .collect(Collectors.toList()));
-        entity.setAffiliation(
-                author.getAffiliationList().stream()
+        entity
+            .setAffiliation(
+                author
                    .getAffiliationList()
                    .stream()
                    .map(ProtoConverter::mapStringField)
                    .collect(Collectors.toList()));
         return entity;
 
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.migration;
 
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
@@ -40,15 +41,16 @@ public class TransformActions implements Serializable {
     private static final String SEPARATOR = "/";
 
     public static void main(String[] args) throws Exception {
-        final ArgumentApplicationParser parser =
-                new ArgumentApplicationParser(
-                        IOUtils.toString(
-                                MigrateActionSet.class.getResourceAsStream(
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+            IOUtils
+                .toString(
+                    MigrateActionSet.class
+                        .getResourceAsStream(
                             "/eu/dnetlib/dhp/actionmanager/migration/transform_actionsets_parameters.json")));
         parser.parseArgument(args);
 
-        Boolean isSparkSessionManaged =
-                Optional.ofNullable(parser.get("isSparkSessionManaged"))
+        Boolean isSparkSessionManaged = Optional
+            .ofNullable(parser.get("isSparkSessionManaged"))
             .map(Boolean::valueOf)
             .orElse(Boolean.TRUE);
         log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
@@ -83,8 +85,7 @@ public class TransformActions implements Serializable {
         final String rawset = pathQ.pollLast();
         final String actionSetDirectory = pathQ.pollLast();
 
-        final Path targetDirectory =
-                new Path(targetBaseDir + SEPARATOR + actionSetDirectory + SEPARATOR + rawset);
+        final Path targetDirectory = new Path(targetBaseDir + SEPARATOR + actionSetDirectory + SEPARATOR + rawset);
 
         if (fs.exists(targetDirectory)) {
             log.info("found target directory '{}", targetDirectory);
@@ -94,7 +95,8 @@ public class TransformActions implements Serializable {
 
         log.info("transforming actions from '{}' to '{}'", sourcePath, targetDirectory);
 
-        sc.sequenceFile(sourcePath, Text.class, Text.class)
+        sc
+            .sequenceFile(sourcePath, Text.class, Text.class)
             .map(a -> eu.dnetlib.actionmanager.actions.AtomicAction.fromJSON(a._2().toString()))
             .map(TransformActions::doTransform)
             .filter(Objects::nonNull)
@@ -129,8 +131,12 @@ public class TransformActions implements Serializable {
             case project:
                 return new AtomicAction<>(Project.class, (Project) oaf);
             case result:
-                final String resulttypeid =
-                        proto_oaf.getEntity().getResult().getMetadata().getResulttype().getClassid();
+                final String resulttypeid = proto_oaf
+                    .getEntity()
+                    .getResult()
+                    .getMetadata()
+                    .getResulttype()
+                    .getClassid();
                 switch (resulttypeid) {
                     case "publication":
                         return new AtomicAction<>(Publication.class, (Publication) oaf);
@@ -157,8 +163,7 @@ public class TransformActions implements Serializable {
 
     private static String getTargetBaseDir(String isLookupUrl) throws ISLookUpException {
         ISLookUpService isLookUp = ISLookupClientFactory.getLookUpService(isLookupUrl);
-        String XQUERY =
-                "collection('/db/DRIVER/ServiceResources/ActionManagerServiceResourceType')//SERVICE_PROPERTIES/PROPERTY[@key = 'basePath']/@value/string()";
+        String XQUERY = "collection('/db/DRIVER/ServiceResources/ActionManagerServiceResourceType')//SERVICE_PROPERTIES/PROPERTY[@key = 'basePath']/@value/string()";
         return isLookUp.getResourceProfileByQuery(XQUERY);
     }
 }
 
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.partition;
 
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
@@ -23,20 +24,22 @@ import org.slf4j.LoggerFactory;
 /** Partitions given set of action sets by payload type. */
 public class PartitionActionSetsByPayloadTypeJob {
 
-    private static final Logger logger =
-            LoggerFactory.getLogger(PartitionActionSetsByPayloadTypeJob.class);
+    private static final Logger logger = LoggerFactory.getLogger(PartitionActionSetsByPayloadTypeJob.class);
 
-    private static final StructType KV_SCHEMA =
-            StructType$.MODULE$.apply(
-                    Arrays.asList(
+    private static final StructType KV_SCHEMA = StructType$.MODULE$
+        .apply(
+            Arrays
+                .asList(
                     StructField$.MODULE$.apply("key", DataTypes.StringType, false, Metadata.empty()),
                     StructField$.MODULE$.apply("value", DataTypes.StringType, false, Metadata.empty())));
 
-    private static final StructType ATOMIC_ACTION_SCHEMA =
-            StructType$.MODULE$.apply(
-                    Arrays.asList(
+    private static final StructType ATOMIC_ACTION_SCHEMA = StructType$.MODULE$
+        .apply(
+            Arrays
+                .asList(
                     StructField$.MODULE$.apply("clazz", DataTypes.StringType, false, Metadata.empty()),
-                    StructField$.MODULE$.apply(
+                    StructField$.MODULE$
+                        .apply(
                             "payload", DataTypes.StringType, false, Metadata.empty())));
 
     private ISClient isClient;
@@ -45,18 +48,20 @@ public class PartitionActionSetsByPayloadTypeJob {
         this.isClient = new ISClient(isLookupUrl);
     }
 
-    public PartitionActionSetsByPayloadTypeJob() {}
+    public PartitionActionSetsByPayloadTypeJob() {
+    }
 
     public static void main(String[] args) throws Exception {
-        String jsonConfiguration =
-                IOUtils.toString(
-                        PromoteActionPayloadForGraphTableJob.class.getResourceAsStream(
+        String jsonConfiguration = IOUtils
+            .toString(
+                PromoteActionPayloadForGraphTableJob.class
+                    .getResourceAsStream(
                         "/eu/dnetlib/dhp/actionmanager/partition/partition_action_sets_by_payload_type_input_parameters.json"));
         final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
         parser.parseArgument(args);
 
-        Boolean isSparkSessionManaged =
-                Optional.ofNullable(parser.get("isSparkSessionManaged"))
+        Boolean isSparkSessionManaged = Optional
+            .ofNullable(parser.get("isSparkSessionManaged"))
             .map(Boolean::valueOf)
            .orElse(Boolean.TRUE);
         logger.info("isSparkSessionManaged: {}", isSparkSessionManaged);
@@ -97,7 +102,8 @@ public class PartitionActionSetsByPayloadTypeJob {
 
     private static void readAndWriteActionSetsFromPaths(
         SparkSession spark, List<String> inputActionSetPaths, String outputPath) {
-        inputActionSetPaths.stream()
+        inputActionSetPaths
+            .stream()
             .filter(path -> HdfsSupport.exists(path, spark.sparkContext().hadoopConfiguration()))
             .forEach(
                 inputActionSetPath -> {
@@ -111,8 +117,8 @@ public class PartitionActionSetsByPayloadTypeJob {
 
         JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
-        JavaRDD<Row> rdd =
-                sc.sequenceFile(path, Text.class, Text.class)
+        JavaRDD<Row> rdd = sc
+            .sequenceFile(path, Text.class, Text.class)
             .map(x -> RowFactory.create(x._1().toString(), x._2().toString()));
 
         return spark
 
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.promote;
 
 import static eu.dnetlib.dhp.schema.common.ModelSupport.isSubClass;
@@ -11,17 +12,17 @@ import java.util.function.BiFunction;
 /** OAF model merging support. */
 public class MergeAndGet {
 
-    private MergeAndGet() {}
+    private MergeAndGet() {
+    }
 
     /**
      * Strategy for merging OAF model objects.
-     *
-     * <p>MERGE_FROM_AND_GET: use OAF 'mergeFrom' method SELECT_NEWER_AND_GET: use last update
-     * timestamp to return newer instance
+     * <p>
+     * MERGE_FROM_AND_GET: use OAF 'mergeFrom' method SELECT_NEWER_AND_GET: use last update timestamp to return newer
+     * instance
      */
     public enum Strategy {
-        MERGE_FROM_AND_GET,
-        SELECT_NEWER_AND_GET
+        MERGE_FROM_AND_GET, SELECT_NEWER_AND_GET
     }
 
     /**
@@ -32,8 +33,8 @@ public class MergeAndGet {
     * @param <A> Action payload type
     * @return BiFunction to be used to merge OAF objects
     */
-    public static <G extends Oaf, A extends Oaf>
-            SerializableSupplier<BiFunction<G, A, G>> functionFor(Strategy strategy) {
+    public static <G extends Oaf, A extends Oaf> SerializableSupplier<BiFunction<G, A, G>> functionFor(
+        Strategy strategy) {
         switch (strategy) {
             case MERGE_FROM_AND_GET:
                 return () -> MergeAndGet::mergeFromAndGet;
@@ -54,7 +55,8 @@ public class MergeAndGet {
             return x;
         }
         throw new RuntimeException(
-            String.format(
+            String
+                .format(
                     "MERGE_FROM_AND_GET incompatible types: %s, %s",
                     x.getClass().getCanonicalName(), y.getClass().getCanonicalName()));
     }
@@ -70,12 +72,14 @@ public class MergeAndGet {
             return x;
         } else if (isSubClass(x, y) && x.getLastupdatetimestamp() < y.getLastupdatetimestamp()) {
             throw new RuntimeException(
-                String.format(
+                String
+                    .format(
                         "SELECT_NEWER_AND_GET cannot return right type when it is not the same as left type: %s, %s",
                         x.getClass().getCanonicalName(), y.getClass().getCanonicalName()));
         }
         throw new RuntimeException(
-            String.format(
+            String
+                .format(
                     "SELECT_NEWER_AND_GET cannot be used when left is not subtype of right: %s, %s",
                     x.getClass().getCanonicalName(), y.getClass().getCanonicalName()));
     }
 
@ -1,3 +1,4 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.actionmanager.promote;
|
package eu.dnetlib.dhp.actionmanager.promote;
|
||||||
|
|
||||||
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
|
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
|
||||||
|
@ -25,21 +26,21 @@ import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
/** Applies a given action payload file to graph table of compatible type. */
|
/** Applies a given action payload file to graph table of compatible type. */
|
||||||
public class PromoteActionPayloadForGraphTableJob {
|
public class PromoteActionPayloadForGraphTableJob {
|
||||||
private static final Logger logger =
|
private static final Logger logger = LoggerFactory.getLogger(PromoteActionPayloadForGraphTableJob.class);
|
||||||
LoggerFactory.getLogger(PromoteActionPayloadForGraphTableJob.class);
|
|
||||||
|
|
||||||
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
|
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
|
||||||
|
|
||||||
public static void main(String[] args) throws Exception {
|
public static void main(String[] args) throws Exception {
|
||||||
String jsonConfiguration =
|
String jsonConfiguration = IOUtils
|
||||||
IOUtils.toString(
|
.toString(
|
||||||
PromoteActionPayloadForGraphTableJob.class.getResourceAsStream(
|
PromoteActionPayloadForGraphTableJob.class
|
||||||
|
.getResourceAsStream(
|
||||||
"/eu/dnetlib/dhp/actionmanager/promote/promote_action_payload_for_graph_table_input_parameters.json"));
|
"/eu/dnetlib/dhp/actionmanager/promote/promote_action_payload_for_graph_table_input_parameters.json"));
|
||||||
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
|
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
|
||||||
parser.parseArgument(args);
|
parser.parseArgument(args);
|
||||||
|
|
||||||
Boolean isSparkSessionManaged =
|
Boolean isSparkSessionManaged = Optional
|
||||||
Optional.ofNullable(parser.get("isSparkSessionManaged"))
|
.ofNullable(parser.get("isSparkSessionManaged"))
|
||||||
.map(Boolean::valueOf)
|
.map(Boolean::valueOf)
|
||||||
.orElse(Boolean.TRUE);
|
.orElse(Boolean.TRUE);
|
||||||
logger.info("isSparkSessionManaged: {}", isSparkSessionManaged);
|
logger.info("isSparkSessionManaged: {}", isSparkSessionManaged);
|
||||||
|
@ -59,13 +60,11 @@ public class PromoteActionPayloadForGraphTableJob {
|
||||||
String outputGraphTablePath = parser.get("outputGraphTablePath");
|
String outputGraphTablePath = parser.get("outputGraphTablePath");
|
||||||
logger.info("outputGraphTablePath: {}", outputGraphTablePath);
|
logger.info("outputGraphTablePath: {}", outputGraphTablePath);
|
||||||
|
|
||||||
MergeAndGet.Strategy strategy =
|
MergeAndGet.Strategy strategy = MergeAndGet.Strategy.valueOf(parser.get("mergeAndGetStrategy").toUpperCase());
|
||||||
MergeAndGet.Strategy.valueOf(parser.get("mergeAndGetStrategy").toUpperCase());
|
|
||||||
logger.info("strategy: {}", strategy);
|
logger.info("strategy: {}", strategy);
|
||||||
|
|
||||||
Class<? extends Oaf> rowClazz = (Class<? extends Oaf>) Class.forName(graphTableClassName);
|
Class<? extends Oaf> rowClazz = (Class<? extends Oaf>) Class.forName(graphTableClassName);
|
||||||
Class<? extends Oaf> actionPayloadClazz =
|
Class<? extends Oaf> actionPayloadClazz = (Class<? extends Oaf>) Class.forName(actionPayloadClassName);
|
||||||
(Class<? extends Oaf>) Class.forName(actionPayloadClassName);
|
|
||||||
|
|
||||||
throwIfGraphTableClassIsNotSubClassOfActionPayloadClass(rowClazz, actionPayloadClazz);
|
throwIfGraphTableClassIsNotSubClassOfActionPayloadClass(rowClazz, actionPayloadClazz);
|
||||||
|
|
||||||
|
@ -92,8 +91,8 @@ public class PromoteActionPayloadForGraphTableJob {
|
||||||
private static void throwIfGraphTableClassIsNotSubClassOfActionPayloadClass(
|
private static void throwIfGraphTableClassIsNotSubClassOfActionPayloadClass(
|
||||||
Class<? extends Oaf> rowClazz, Class<? extends Oaf> actionPayloadClazz) {
|
Class<? extends Oaf> rowClazz, Class<? extends Oaf> actionPayloadClazz) {
|
||||||
if (!isSubClass(rowClazz, actionPayloadClazz)) {
|
if (!isSubClass(rowClazz, actionPayloadClazz)) {
|
||||||
String msg =
|
String msg = String
|
||||||
String.format(
|
.format(
|
||||||
"graph table class is not a subclass of action payload class: graph=%s, action=%s",
|
"graph table class is not a subclass of action payload class: graph=%s, action=%s",
|
||||||
 				rowClazz.getCanonicalName(), actionPayloadClazz.getCanonicalName());
 			throw new RuntimeException(msg);
@@ -113,11 +112,9 @@ public class PromoteActionPayloadForGraphTableJob {
 			Class<G> rowClazz,
 			Class<A> actionPayloadClazz) {
 		Dataset<G> rowDS = readGraphTable(spark, inputGraphTablePath, rowClazz);
-		Dataset<A> actionPayloadDS =
-			readActionPayload(spark, inputActionPayloadPath, actionPayloadClazz);
+		Dataset<A> actionPayloadDS = readActionPayload(spark, inputActionPayloadPath, actionPayloadClazz);

-		Dataset<G> result =
-			promoteActionPayloadForGraphTable(
+		Dataset<G> result = promoteActionPayloadForGraphTable(
 				rowDS, actionPayloadDS, strategy, rowClazz, actionPayloadClazz)
 				.map((MapFunction<G, G>) value -> value, Encoders.bean(rowClazz));

@@ -147,9 +144,8 @@ public class PromoteActionPayloadForGraphTableJob {
 			.read()
 			.parquet(path)
 			.map(
-				(MapFunction<Row, A>)
-					value ->
-						OBJECT_MAPPER.readValue(value.<String>getAs("payload"), actionPayloadClazz),
+				(MapFunction<Row, A>) value -> OBJECT_MAPPER
+					.readValue(value.<String> getAs("payload"), actionPayloadClazz),
 				Encoders.bean(actionPayloadClazz));
 	}

@@ -159,22 +155,21 @@ public class PromoteActionPayloadForGraphTableJob {
 			MergeAndGet.Strategy strategy,
 			Class<G> rowClazz,
 			Class<A> actionPayloadClazz) {
-		logger.info(
+		logger
+			.info(
 				"Promoting action payload for graph table: payload={}, table={}",
 				actionPayloadClazz.getSimpleName(),
 				rowClazz.getSimpleName());

 		SerializableSupplier<Function<G, String>> rowIdFn = ModelSupport::idFn;
 		SerializableSupplier<Function<A, String>> actionPayloadIdFn = ModelSupport::idFn;
-		SerializableSupplier<BiFunction<G, A, G>> mergeRowWithActionPayloadAndGetFn =
-			MergeAndGet.functionFor(strategy);
+		SerializableSupplier<BiFunction<G, A, G>> mergeRowWithActionPayloadAndGetFn = MergeAndGet.functionFor(strategy);
 		SerializableSupplier<BiFunction<G, G, G>> mergeRowsAndGetFn = MergeAndGet.functionFor(strategy);
 		SerializableSupplier<G> zeroFn = zeroFn(rowClazz);
-		SerializableSupplier<Function<G, Boolean>> isNotZeroFn =
-			PromoteActionPayloadForGraphTableJob::isNotZeroFnUsingIdOrSource;
+		SerializableSupplier<Function<G, Boolean>> isNotZeroFn = PromoteActionPayloadForGraphTableJob::isNotZeroFnUsingIdOrSource;

-		Dataset<G> joinedAndMerged =
-			PromoteActionPayloadFunctions.joinGraphTableWithActionPayloadAndMerge(
+		Dataset<G> joinedAndMerged = PromoteActionPayloadFunctions
+			.joinGraphTableWithActionPayloadAndMerge(
 				rowDS,
 				actionPayloadDS,
 				rowIdFn,
@@ -183,7 +178,8 @@ public class PromoteActionPayloadForGraphTableJob {
 				rowClazz,
 				actionPayloadClazz);

-		return PromoteActionPayloadFunctions.groupGraphTableByIdAndMerge(
+		return PromoteActionPayloadFunctions
+			.groupGraphTableByIdAndMerge(
 				joinedAndMerged, rowIdFn, mergeRowsAndGetFn, zeroFn, isNotZeroFn, rowClazz);
 	}
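The pattern repeated through this file is the new formatter's treatment of wrapped statements: an assignment keeps the start of its right-hand side on the same line, and long call chains break before each dot. A minimal standalone sketch of the two styles (class and variable names here are illustrative, not from this codebase):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class WrappingStyleDemo {

    public static void main(String[] args) {
        // Previous style: the assignment broke after '=' and the continuation
        // line carried the whole expression.
        // List<String> upper =
        //     Arrays.asList("a", "b").stream().map(String::toUpperCase).collect(Collectors.toList());

        // New style (net.revelc.code.formatter): the receiver stays on the
        // assignment line and each chained call breaks before the dot.
        List<String> upper = Arrays
            .asList("a", "b")
            .stream()
            .map(String::toUpperCase)
            .collect(Collectors.toList());
        System.out.println(upper); // [A, B]
    }
}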
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.promote;

 import static eu.dnetlib.dhp.schema.common.ModelSupport.isSubClass;
@@ -20,11 +21,11 @@ import scala.Tuple2;
 /** Promote action payload functions. */
 public class PromoteActionPayloadFunctions {

-	private PromoteActionPayloadFunctions() {}
+	private PromoteActionPayloadFunctions() {
+	}

 	/**
-	 * Joins dataset representing graph table with dataset representing action payload using supplied
-	 * functions.
+	 * Joins dataset representing graph table with dataset representing action payload using supplied functions.
 	 *
 	 * @param rowDS Dataset representing graph table
 	 * @param actionPayloadDS Dataset representing action payload
@@ -51,8 +52,8 @@ public class PromoteActionPayloadFunctions {
 		}

 		Dataset<Tuple2<String, G>> rowWithIdDS = mapToTupleWithId(rowDS, rowIdFn, rowClazz);
-		Dataset<Tuple2<String, A>> actionPayloadWithIdDS =
-			mapToTupleWithId(actionPayloadDS, actionPayloadIdFn, actionPayloadClazz);
+		Dataset<Tuple2<String, A>> actionPayloadWithIdDS = mapToTupleWithId(
+			actionPayloadDS, actionPayloadIdFn, actionPayloadClazz);

 		return rowWithIdDS
 			.joinWith(
@@ -60,21 +61,17 @@ public class PromoteActionPayloadFunctions {
 				rowWithIdDS.col("_1").equalTo(actionPayloadWithIdDS.col("_1")),
 				"full_outer")
 			.map(
-				(MapFunction<Tuple2<Tuple2<String, G>, Tuple2<String, A>>, G>)
-					value -> {
+				(MapFunction<Tuple2<Tuple2<String, G>, Tuple2<String, A>>, G>) value -> {
 					Optional<G> rowOpt = Optional.ofNullable(value._1()).map(Tuple2::_2);
 					Optional<A> actionPayloadOpt = Optional.ofNullable(value._2()).map(Tuple2::_2);
 					return rowOpt
 						.map(
-							row ->
-								actionPayloadOpt
+							row -> actionPayloadOpt
 								.map(
-									actionPayload ->
-										mergeAndGetFn.get().apply(row, actionPayload))
+									actionPayload -> mergeAndGetFn.get().apply(row, actionPayload))
 								.orElse(row))
 						.orElseGet(
-							() ->
-								actionPayloadOpt
+							() -> actionPayloadOpt
 								.filter(
 									actionPayload -> actionPayload.getClass().equals(rowClazz))
 								.map(rowClazz::cast)
@@ -86,7 +83,8 @@ public class PromoteActionPayloadFunctions {

 	private static <T extends Oaf> Dataset<Tuple2<String, T>> mapToTupleWithId(
 		Dataset<T> ds, SerializableSupplier<Function<T, String>> idFn, Class<T> clazz) {
-		return ds.map(
+		return ds
+			.map(
 				(MapFunction<T, Tuple2<String, T>>) value -> new Tuple2<>(idFn.get().apply(value), value),
 				Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
 	}
@@ -110,8 +108,7 @@ public class PromoteActionPayloadFunctions {
 			SerializableSupplier<G> zeroFn,
 			SerializableSupplier<Function<G, Boolean>> isNotZeroFn,
 			Class<G> rowClazz) {
-		TypedColumn<G, G> aggregator =
-			new TableAggregator<>(zeroFn, mergeAndGetFn, isNotZeroFn, rowClazz).toColumn();
+		TypedColumn<G, G> aggregator = new TableAggregator<>(zeroFn, mergeAndGetFn, isNotZeroFn, rowClazz).toColumn();
 		return rowDS
 			.groupByKey((MapFunction<G, String>) x -> rowIdFn.get().apply(x), Encoders.STRING())
 			.agg(aggregator)
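The reformatted lambda above encodes a per-pair decision after the full outer join: a row merges with its payload when both exist, survives alone otherwise, and a payload without a row is kept only when it already has the graph-table type. A plain-Java sketch of that same decision, with hypothetical names (resolve is not a method of this codebase):

import java.util.Optional;
import java.util.function.BiFunction;

public class FullOuterMergeDemo {

    // Mirrors the join-result handling above: prefer merge(row, payload),
    // fall back to the bare row, and surface a row-less payload only when
    // its class is exactly the graph-table row class.
    static <G> Optional<G> resolve(G row, Object payload, Class<G> rowClazz, BiFunction<G, Object, G> merge) {
        if (row != null) {
            return Optional.of(payload != null ? merge.apply(row, payload) : row);
        }
        return Optional
            .ofNullable(payload)
            .filter(p -> p.getClass().equals(rowClazz))
            .map(rowClazz::cast);
    }

    public static void main(String[] args) {
        BiFunction<String, Object, String> merge = (r, p) -> r + "+" + p;
        System.out.println(resolve("row", "payload", String.class, merge)); // Optional[row+payload]
        System.out.println(resolve("row", null, String.class, merge));      // Optional[row]
        System.out.println(resolve(null, "payload", String.class, merge));  // Optional[payload]
    }
}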
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.partition;

 import static eu.dnetlib.dhp.common.ThrowingSupport.rethrowAsRuntimeException;
@@ -37,19 +38,20 @@ import scala.collection.mutable.Seq;

 @ExtendWith(MockitoExtension.class)
 public class PartitionActionSetsByPayloadTypeJobTest {
-	private static final ClassLoader cl =
-		PartitionActionSetsByPayloadTypeJobTest.class.getClassLoader();
+	private static final ClassLoader cl = PartitionActionSetsByPayloadTypeJobTest.class.getClassLoader();

 	private static Configuration configuration;
 	private static SparkSession spark;

 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

-	private static final StructType ATOMIC_ACTION_SCHEMA =
-		StructType$.MODULE$.apply(
-			Arrays.asList(
+	private static final StructType ATOMIC_ACTION_SCHEMA = StructType$.MODULE$
+		.apply(
+			Arrays
+				.asList(
 				StructField$.MODULE$.apply("clazz", DataTypes.StringType, false, Metadata.empty()),
-				StructField$.MODULE$.apply(
+				StructField$.MODULE$
+					.apply(
 					"payload", DataTypes.StringType, false, Metadata.empty())));

 	@BeforeAll
@@ -71,7 +73,8 @@ public class PartitionActionSetsByPayloadTypeJobTest {
 	@Nested
 	class Main {

-		@Mock private ISClient isClient;
+		@Mock
+		private ISClient isClient;

 		@Test
 		public void shouldPartitionActionSetsByPayloadType(@TempDir Path workingDir) throws Exception {
@@ -84,12 +87,14 @@ public class PartitionActionSetsByPayloadTypeJobTest {
 			List<String> inputActionSetsPaths = resolveInputActionSetPaths(inputActionSetsBaseDir);

 			// when
-			Mockito.when(isClient.getLatestRawsetPaths(Mockito.anyString()))
+			Mockito
+				.when(isClient.getLatestRawsetPaths(Mockito.anyString()))
 				.thenReturn(inputActionSetsPaths);

 			PartitionActionSetsByPayloadTypeJob job = new PartitionActionSetsByPayloadTypeJob();
 			job.setIsClient(isClient);
-			job.run(
+			job
+				.run(
 				Boolean.FALSE,
 				"", // it can be empty we're mocking the response from isClient
 				// to
@@ -114,7 +119,8 @@ public class PartitionActionSetsByPayloadTypeJobTest {

 	private List<String> resolveInputActionSetPaths(Path inputActionSetsBaseDir) throws IOException {
 		Path inputActionSetJsonDumpsDir = getInputActionSetJsonDumpsDir();
-		return Files.list(inputActionSetJsonDumpsDir)
+		return Files
+			.list(inputActionSetJsonDumpsDir)
 			.map(
 				path -> {
 					String inputActionSetId = path.getFileName().toString();
@@ -128,29 +134,31 @@ public class PartitionActionSetsByPayloadTypeJobTest {
 		Path inputActionSetJsonDumpsDir = getInputActionSetJsonDumpsDir();

 		Map<String, List<String>> oafsByType = new HashMap<>();
-		Files.list(inputActionSetJsonDumpsDir)
+		Files
+			.list(inputActionSetJsonDumpsDir)
 			.forEach(
 				inputActionSetJsonDumpFile -> {
 					String inputActionSetId = inputActionSetJsonDumpFile.getFileName().toString();
 					Path inputActionSetDir = inputActionSetsDir.resolve(inputActionSetId);

-					Dataset<String> actionDS =
-						readActionsFromJsonDump(inputActionSetJsonDumpFile.toString()).cache();
+					Dataset<String> actionDS = readActionsFromJsonDump(inputActionSetJsonDumpFile.toString()).cache();

 					writeActionsAsJobInput(actionDS, inputActionSetId, inputActionSetDir.toString());

-					Map<String, List<String>> actionSetOafsByType =
-						actionDS
+					Map<String, List<String>> actionSetOafsByType = actionDS
 						.withColumn("atomic_action", from_json(col("value"), ATOMIC_ACTION_SCHEMA))
-						.select(expr("atomic_action.*")).groupBy(col("clazz"))
-						.agg(collect_list(col("payload")).as("payload_list")).collectAsList().stream()
+						.select(expr("atomic_action.*"))
+						.groupBy(col("clazz"))
+						.agg(collect_list(col("payload")).as("payload_list"))
+						.collectAsList()
+						.stream()
 						.map(
-							row ->
-								new AbstractMap.SimpleEntry<>(
-									row.<String>getAs("clazz"),
-									mutableSeqAsJavaList(row.<Seq<String>>getAs("payload_list"))))
+							row -> new AbstractMap.SimpleEntry<>(
+								row.<String> getAs("clazz"),
+								mutableSeqAsJavaList(row.<Seq<String>> getAs("payload_list"))))
 						.collect(
-							Collectors.toMap(
+							Collectors
+								.toMap(
 								AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue));

 					actionSetOafsByType
@@ -172,8 +180,10 @@ public class PartitionActionSetsByPayloadTypeJobTest {
 	}

 	private static Path getInputActionSetJsonDumpsDir() {
-		return Paths.get(
-			Objects.requireNonNull(cl.getResource("eu/dnetlib/dhp/actionmanager/partition/input/"))
+		return Paths
+			.get(
+				Objects
+					.requireNonNull(cl.getResource("eu/dnetlib/dhp/actionmanager/partition/input/"))
 				.getFile());
 	}

@@ -195,12 +205,12 @@ public class PartitionActionSetsByPayloadTypeJobTest {
 		Path outputDatasetDir = outputDir.resolve(String.format("clazz=%s", clazz.getCanonicalName()));
 		Files.exists(outputDatasetDir);

-		List<T> actuals =
-			readActionPayloadFromJobOutput(outputDatasetDir.toString(), clazz).collectAsList();
+		List<T> actuals = readActionPayloadFromJobOutput(outputDatasetDir.toString(), clazz).collectAsList();
 		actuals.sort(Comparator.comparingInt(Object::hashCode));

-		List<T> expecteds =
-			oafsByClassName.get(clazz.getCanonicalName()).stream()
+		List<T> expecteds = oafsByClassName
+			.get(clazz.getCanonicalName())
+			.stream()
 			.map(json -> mapToOaf(json, clazz))
 			.sorted(Comparator.comparingInt(Object::hashCode))
 			.collect(Collectors.toList());
@@ -214,15 +224,15 @@ public class PartitionActionSetsByPayloadTypeJobTest {
 			.read()
 			.parquet(path)
 			.map(
-				(MapFunction<Row, T>)
-					value -> OBJECT_MAPPER.readValue(value.<String>getAs("payload"), clazz),
+				(MapFunction<Row, T>) value -> OBJECT_MAPPER.readValue(value.<String> getAs("payload"), clazz),
 				Encoders.bean(clazz));
 	}

 	private static <T extends Oaf> T mapToOaf(String json, Class<T> clazz) {
 		return rethrowAsRuntimeException(
 			() -> OBJECT_MAPPER.readValue(json, clazz),
-			String.format(
+			String
+				.format(
 				"failed to map json to class: json=%s, class=%s", json, clazz.getCanonicalName()));
 	}
 }
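The test above replaces the ISClient lookup with a Mockito stub, so the partition job never contacts a live information service. A minimal self-contained sketch of the same stubbing pattern (the Lookup interface and paths below are hypothetical, standing in for ISClient.getLatestRawsetPaths):

import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Arrays;
import java.util.List;

public class StubbingDemo {

    // Hypothetical stand-in for the ISClient dependency.
    interface Lookup {
        List<String> latestPaths(String setIds);
    }

    public static void main(String[] args) {
        Lookup lookup = mock(Lookup.class);
        // Any string argument returns the canned paths, exactly as the test
        // above cans the raw-set paths before running the job.
        when(lookup.latestPaths(anyString())).thenReturn(Arrays.asList("/data/raw_aa", "/data/raw_bb"));
        System.out.println(lookup.latestPaths("whatever")); // [/data/raw_aa, /data/raw_bb]
    }
}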
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.promote;

 import static eu.dnetlib.dhp.actionmanager.promote.MergeAndGet.Strategy;
@@ -126,8 +127,10 @@ public class MergeAndGetTest {
 	@Test
 	public void shouldThrowForOafEntityAndOafEntityButNotSubclasses() {
 		// given
-		class OafEntitySub1 extends OafEntity {}
-		class OafEntitySub2 extends OafEntity {}
+		class OafEntitySub1 extends OafEntity {
+		}
+		class OafEntitySub2 extends OafEntity {
+		}

 		OafEntitySub1 a = mock(OafEntitySub1.class);
 		OafEntitySub2 b = mock(OafEntitySub2.class);
@@ -166,8 +169,7 @@ public class MergeAndGetTest {
 		Relation b = mock(Relation.class);

 		// when
-		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
-			functionFor(Strategy.SELECT_NEWER_AND_GET);
+		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.SELECT_NEWER_AND_GET);

 		// then
 		assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
@@ -180,8 +182,7 @@ public class MergeAndGetTest {
 		OafEntity b = mock(OafEntity.class);

 		// when
-		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
-			functionFor(Strategy.SELECT_NEWER_AND_GET);
+		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.SELECT_NEWER_AND_GET);

 		// then
 		assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
@@ -194,8 +195,7 @@ public class MergeAndGetTest {
 		Result b = mock(Result.class);

 		// when
-		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
-			functionFor(Strategy.SELECT_NEWER_AND_GET);
+		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.SELECT_NEWER_AND_GET);

 		// then
 		assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
@@ -212,8 +212,7 @@ public class MergeAndGetTest {
 		b.setLastupdatetimestamp(2L);

 		// when
-		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
-			functionFor(Strategy.SELECT_NEWER_AND_GET);
+		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.SELECT_NEWER_AND_GET);

 		// then
 		assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
@@ -228,8 +227,7 @@ public class MergeAndGetTest {
 		when(b.getLastupdatetimestamp()).thenReturn(2L);

 		// when
-		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
-			functionFor(Strategy.SELECT_NEWER_AND_GET);
+		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.SELECT_NEWER_AND_GET);

 		// then
 		Oaf x = fn.get().apply(a, b);
@@ -246,8 +244,7 @@ public class MergeAndGetTest {
 		when(b.getLastupdatetimestamp()).thenReturn(1L);

 		// when
-		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
-			functionFor(Strategy.SELECT_NEWER_AND_GET);
+		SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.SELECT_NEWER_AND_GET);

 		// then
 		Oaf x = fn.get().apply(a, b);
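Each of the cases touched in this file follows the same JUnit 5 given/when/then shape: build the supplier for the SELECT_NEWER_AND_GET strategy, then assert that applying it to an incomparable pair throws. A condensed, self-contained sketch of that shape, with a hypothetical Merger standing in for MergeAndGet.functionFor:

import static org.junit.jupiter.api.Assertions.assertThrows;

import java.util.function.BiFunction;
import java.util.function.Supplier;

import org.junit.jupiter.api.Test;

public class SelectNewerStyleTest {

    // Hypothetical stand-in: a merge function that rejects every pair, the way
    // the real strategy rejects pairs it cannot order by last-update timestamp.
    static Supplier<BiFunction<Object, Object, Object>> functionFor() {
        return () -> (a, b) -> {
            throw new RuntimeException("arguments are not comparable by date");
        };
    }

    @Test
    public void shouldThrowForNonComparableArguments() {
        // given
        Object a = new Object();
        Object b = new Object();

        // when
        Supplier<BiFunction<Object, Object, Object>> fn = functionFor();

        // then
        assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
    }
}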
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.promote;

 import static org.junit.jupiter.api.Assertions.*;
@@ -27,8 +28,7 @@ import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;

 public class PromoteActionPayloadForGraphTableJobTest {
-	private static final ClassLoader cl =
-		PromoteActionPayloadForGraphTableJobTest.class.getClassLoader();
+	private static final ClassLoader cl = PromoteActionPayloadForGraphTableJobTest.class.getClassLoader();

 	private static SparkSession spark;

@@ -52,8 +52,7 @@ public class PromoteActionPayloadForGraphTableJobTest {

 	@BeforeEach
 	public void beforeEach() throws IOException {
-		workingDir =
-			Files.createTempDirectory(PromoteActionPayloadForGraphTableJobTest.class.getSimpleName());
+		workingDir = Files.createTempDirectory(PromoteActionPayloadForGraphTableJobTest.class.getSimpleName());
 		inputDir = workingDir.resolve("input");
 		inputGraphRootDir = inputDir.resolve("graph");
 		inputActionPayloadRootDir = inputDir.resolve("action_payload");
@@ -81,11 +80,10 @@ public class PromoteActionPayloadForGraphTableJobTest {
 		Class<OafEntity> actionPayloadClazz = OafEntity.class;

 		// when
-		RuntimeException exception =
-			assertThrows(
+		RuntimeException exception = assertThrows(
 			RuntimeException.class,
-			() ->
-				PromoteActionPayloadForGraphTableJob.main(
+			() -> PromoteActionPayloadForGraphTableJob
+				.main(
 				new String[] {
 					"-isSparkSessionManaged",
 					Boolean.FALSE.toString(),
@@ -104,16 +102,15 @@ public class PromoteActionPayloadForGraphTableJobTest {
 			}));

 		// then
-		String msg =
-			String.format(
+		String msg = String
+			.format(
 			"graph table class is not a subclass of action payload class: graph=%s, action=%s",
 			rowClazz.getCanonicalName(), actionPayloadClazz.getCanonicalName());
 		assertTrue(exception.getMessage().contains(msg));
 	}

 	@ParameterizedTest(name = "strategy: {0}, graph table: {1}, action payload: {2}")
-	@MethodSource(
-		"eu.dnetlib.dhp.actionmanager.promote.PromoteActionPayloadForGraphTableJobTest#promoteJobTestParams")
+	@MethodSource("eu.dnetlib.dhp.actionmanager.promote.PromoteActionPayloadForGraphTableJobTest#promoteJobTestParams")
 	public void shouldPromoteActionPayloadForGraphTable(
 		MergeAndGet.Strategy strategy,
 		Class<? extends Oaf> rowClazz,
@@ -121,13 +118,12 @@ public class PromoteActionPayloadForGraphTableJobTest {
 		throws Exception {
 		// given
 		Path inputGraphTableDir = createGraphTable(inputGraphRootDir, rowClazz);
-		Path inputActionPayloadDir =
-			createActionPayload(inputActionPayloadRootDir, rowClazz, actionPayloadClazz);
-		Path outputGraphTableDir =
-			outputDir.resolve("graph").resolve(rowClazz.getSimpleName().toLowerCase());
+		Path inputActionPayloadDir = createActionPayload(inputActionPayloadRootDir, rowClazz, actionPayloadClazz);
+		Path outputGraphTableDir = outputDir.resolve("graph").resolve(rowClazz.getSimpleName().toLowerCase());

 		// when
-		PromoteActionPayloadForGraphTableJob.main(
+		PromoteActionPayloadForGraphTableJob
+			.main(
 			new String[] {
 				"-isSparkSessionManaged",
 				Boolean.FALSE.toString(),
@@ -148,20 +144,21 @@ public class PromoteActionPayloadForGraphTableJobTest {
 		// then
 		assertTrue(Files.exists(outputGraphTableDir));

-		List<? extends Oaf> actualOutputRows =
-			readGraphTableFromJobOutput(outputGraphTableDir.toString(), rowClazz).collectAsList()
+		List<? extends Oaf> actualOutputRows = readGraphTableFromJobOutput(outputGraphTableDir.toString(), rowClazz)
+			.collectAsList()
 			.stream()
 			.sorted(Comparator.comparingInt(Object::hashCode))
 			.collect(Collectors.toList());
-		String expectedOutputGraphTableJsonDumpPath =
-			resultFileLocation(strategy, rowClazz, actionPayloadClazz);
-		Path expectedOutputGraphTableJsonDumpFile =
-			Paths.get(
-				Objects.requireNonNull(cl.getResource(expectedOutputGraphTableJsonDumpPath))
+		String expectedOutputGraphTableJsonDumpPath = resultFileLocation(strategy, rowClazz, actionPayloadClazz);
+		Path expectedOutputGraphTableJsonDumpFile = Paths
+			.get(
+				Objects
+					.requireNonNull(cl.getResource(expectedOutputGraphTableJsonDumpPath))
 				.getFile());
-		List<? extends Oaf> expectedOutputRows =
-			readGraphTableFromJsonDump(expectedOutputGraphTableJsonDumpFile.toString(), rowClazz)
-			.collectAsList().stream()
+		List<? extends Oaf> expectedOutputRows = readGraphTableFromJsonDump(
+			expectedOutputGraphTableJsonDumpFile.toString(), rowClazz)
+			.collectAsList()
+			.stream()
 			.sorted(Comparator.comparingInt(Object::hashCode))
 			.collect(Collectors.toList());
 		assertIterableEquals(expectedOutputRows, actualOutputRows);
@@ -169,7 +166,8 @@ public class PromoteActionPayloadForGraphTableJobTest {
 	}

 	public static Stream<Arguments> promoteJobTestParams() {
-		return Stream.of(
+		return Stream
+			.of(
 			arguments(
 				MergeAndGet.Strategy.MERGE_FROM_AND_GET,
 				eu.dnetlib.dhp.schema.oaf.Dataset.class,
@@ -196,8 +194,8 @@ public class PromoteActionPayloadForGraphTableJobTest {

 	private static <G extends Oaf> Path createGraphTable(Path inputGraphRootDir, Class<G> rowClazz) {
 		String inputGraphTableJsonDumpPath = inputGraphTableJsonDumpLocation(rowClazz);
-		Path inputGraphTableJsonDumpFile =
-			Paths.get(Objects.requireNonNull(cl.getResource(inputGraphTableJsonDumpPath)).getFile());
+		Path inputGraphTableJsonDumpFile = Paths
+			.get(Objects.requireNonNull(cl.getResource(inputGraphTableJsonDumpPath)).getFile());
 		Dataset<G> rowDS = readGraphTableFromJsonDump(inputGraphTableJsonDumpFile.toString(), rowClazz);
 		String inputGraphTableName = rowClazz.getSimpleName().toLowerCase();
 		Path inputGraphTableDir = inputGraphRootDir.resolve(inputGraphTableName);
@@ -206,7 +204,8 @@ public class PromoteActionPayloadForGraphTableJobTest {
 	}

 	private static String inputGraphTableJsonDumpLocation(Class<? extends Oaf> rowClazz) {
-		return String.format(
+		return String
+			.format(
 			"%s/%s.json",
 			"eu/dnetlib/dhp/actionmanager/promote/input/graph", rowClazz.getSimpleName().toLowerCase());
 	}
@@ -227,14 +226,12 @@ public class PromoteActionPayloadForGraphTableJobTest {

 	private static <G extends Oaf, A extends Oaf> Path createActionPayload(
 		Path inputActionPayloadRootDir, Class<G> rowClazz, Class<A> actionPayloadClazz) {
-		String inputActionPayloadJsonDumpPath =
-			inputActionPayloadJsonDumpLocation(rowClazz, actionPayloadClazz);
-		Path inputActionPayloadJsonDumpFile =
-			Paths.get(Objects.requireNonNull(cl.getResource(inputActionPayloadJsonDumpPath)).getFile());
-		Dataset<String> actionPayloadDS =
-			readActionPayloadFromJsonDump(inputActionPayloadJsonDumpFile.toString());
-		Path inputActionPayloadDir =
-			inputActionPayloadRootDir.resolve(actionPayloadClazz.getSimpleName().toLowerCase());
+		String inputActionPayloadJsonDumpPath = inputActionPayloadJsonDumpLocation(rowClazz, actionPayloadClazz);
+		Path inputActionPayloadJsonDumpFile = Paths
+			.get(Objects.requireNonNull(cl.getResource(inputActionPayloadJsonDumpPath)).getFile());
+		Dataset<String> actionPayloadDS = readActionPayloadFromJsonDump(inputActionPayloadJsonDumpFile.toString());
+		Path inputActionPayloadDir = inputActionPayloadRootDir
+			.resolve(actionPayloadClazz.getSimpleName().toLowerCase());
 		writeActionPayloadAsJobInput(actionPayloadDS, inputActionPayloadDir.toString());
 		return inputActionPayloadDir;
 	}
@@ -242,7 +239,8 @@ public class PromoteActionPayloadForGraphTableJobTest {
 	private static String inputActionPayloadJsonDumpLocation(
 		Class<? extends Oaf> rowClazz, Class<? extends Oaf> actionPayloadClazz) {

-		return String.format(
+		return String
+			.format(
 			"eu/dnetlib/dhp/actionmanager/promote/input/action_payload/%s_table/%s.json",
 			rowClazz.getSimpleName().toLowerCase(), actionPayloadClazz.getSimpleName().toLowerCase());
 	}
@@ -269,7 +267,8 @@ public class PromoteActionPayloadForGraphTableJobTest {
 		MergeAndGet.Strategy strategy,
 		Class<? extends Oaf> rowClazz,
 		Class<? extends Oaf> actionPayloadClazz) {
-		return String.format(
+		return String
+			.format(
 			"eu/dnetlib/dhp/actionmanager/promote/output/graph/%s/%s/%s_action_payload/result.json",
 			strategy.name().toLowerCase(),
 			rowClazz.getSimpleName().toLowerCase(),
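The promote test above is driven by a @MethodSource factory that yields one Arguments tuple per strategy/table/payload combination. A stripped-down sketch of the same JUnit 5 pattern (all names below are illustrative, not from this codebase):

import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.params.provider.Arguments.arguments;

import java.util.stream.Stream;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

public class MethodSourceDemoTest {

    // One test invocation per Arguments tuple returned by params().
    @ParameterizedTest(name = "strategy: {0}, table: {1}")
    @MethodSource("params")
    public void shouldRunOncePerCombination(String strategy, Class<?> table) {
        assertNotNull(strategy);
        assertNotNull(table);
    }

    public static Stream<Arguments> params() {
        return Stream
            .of(
                arguments("MERGE_FROM_AND_GET", String.class),
                arguments("SELECT_NEWER_AND_GET", Integer.class));
    }
}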
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.actionmanager.promote;

 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -43,13 +44,14 @@ public class PromoteActionPayloadFunctionsTest {
 	@Test
 	public void shouldThrowWhenTableTypeIsNotSubtypeOfActionPayloadType() {
 		// given
-		class OafImpl extends Oaf {}
+		class OafImpl extends Oaf {
+		}

 		// when
 		assertThrows(
 			RuntimeException.class,
-			() ->
-				PromoteActionPayloadFunctions.joinGraphTableWithActionPayloadAndMerge(
+			() -> PromoteActionPayloadFunctions
+				.joinGraphTableWithActionPayloadAndMerge(
 				null, null, null, null, null, OafImplSubSub.class, OafImpl.class));
 	}

@@ -61,17 +63,16 @@ public class PromoteActionPayloadFunctionsTest {
 		String id2 = "id2";
 		String id3 = "id3";
 		String id4 = "id4";
-		List<OafImplSubSub> rowData =
-			Arrays.asList(
+		List<OafImplSubSub> rowData = Arrays
+			.asList(
 			createOafImplSubSub(id0),
 			createOafImplSubSub(id1),
 			createOafImplSubSub(id2),
 			createOafImplSubSub(id3));
-		Dataset<OafImplSubSub> rowDS =
-			spark.createDataset(rowData, Encoders.bean(OafImplSubSub.class));
+		Dataset<OafImplSubSub> rowDS = spark.createDataset(rowData, Encoders.bean(OafImplSubSub.class));

-		List<OafImplSubSub> actionPayloadData =
-			Arrays.asList(
+		List<OafImplSubSub> actionPayloadData = Arrays
+			.asList(
 			createOafImplSubSub(id1),
 			createOafImplSubSub(id2),
 			createOafImplSubSub(id2),
@@ -82,22 +83,20 @@ public class PromoteActionPayloadFunctionsTest {
 			createOafImplSubSub(id4),
 			createOafImplSubSub(id4),
 			createOafImplSubSub(id4));
-		Dataset<OafImplSubSub> actionPayloadDS =
-			spark.createDataset(actionPayloadData, Encoders.bean(OafImplSubSub.class));
+		Dataset<OafImplSubSub> actionPayloadDS = spark
+			.createDataset(actionPayloadData, Encoders.bean(OafImplSubSub.class));

 		SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn = () -> OafImplRoot::getId;
-		SerializableSupplier<Function<OafImplSubSub, String>> actionPayloadIdFn =
-			() -> OafImplRoot::getId;
-		SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>> mergeAndGetFn =
-			() ->
-				(x, y) -> {
+		SerializableSupplier<Function<OafImplSubSub, String>> actionPayloadIdFn = () -> OafImplRoot::getId;
+		SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>> mergeAndGetFn = () -> (x,
+			y) -> {
 			x.merge(y);
 			return x;
 		};

 		// when
-		List<OafImplSubSub> results =
-			PromoteActionPayloadFunctions.joinGraphTableWithActionPayloadAndMerge(
+		List<OafImplSubSub> results = PromoteActionPayloadFunctions
+			.joinGraphTableWithActionPayloadAndMerge(
 			rowDS,
 			actionPayloadDS,
 			rowIdFn,
@@ -115,7 +114,8 @@ public class PromoteActionPayloadFunctionsTest {
 		assertEquals(3, results.stream().filter(x -> x.getId().equals(id3)).count());
 		assertEquals(4, results.stream().filter(x -> x.getId().equals(id4)).count());

-		results.forEach(
+		results
+			.forEach(
 			result -> {
 				switch (result.getId()) {
 					case "id0":
@@ -143,17 +143,16 @@ public class PromoteActionPayloadFunctionsTest {
 		String id2 = "id2";
 		String id3 = "id3";
 		String id4 = "id4";
-		List<OafImplSubSub> rowData =
-			Arrays.asList(
+		List<OafImplSubSub> rowData = Arrays
+			.asList(
 			createOafImplSubSub(id0),
 			createOafImplSubSub(id1),
 			createOafImplSubSub(id2),
 			createOafImplSubSub(id3));
-		Dataset<OafImplSubSub> rowDS =
-			spark.createDataset(rowData, Encoders.bean(OafImplSubSub.class));
+		Dataset<OafImplSubSub> rowDS = spark.createDataset(rowData, Encoders.bean(OafImplSubSub.class));

-		List<OafImplSub> actionPayloadData =
-			Arrays.asList(
+		List<OafImplSub> actionPayloadData = Arrays
+			.asList(
 			createOafImplSub(id1),
 			createOafImplSub(id2),
 			createOafImplSub(id2),
@@ -164,22 +163,19 @@ public class PromoteActionPayloadFunctionsTest {
 			createOafImplSub(id4),
 			createOafImplSub(id4),
 			createOafImplSub(id4));
-		Dataset<OafImplSub> actionPayloadDS =
-			spark.createDataset(actionPayloadData, Encoders.bean(OafImplSub.class));
+		Dataset<OafImplSub> actionPayloadDS = spark
+			.createDataset(actionPayloadData, Encoders.bean(OafImplSub.class));

 		SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn = () -> OafImplRoot::getId;
-		SerializableSupplier<Function<OafImplSub, String>> actionPayloadIdFn =
-			() -> OafImplRoot::getId;
-		SerializableSupplier<BiFunction<OafImplSubSub, OafImplSub, OafImplSubSub>> mergeAndGetFn =
-			() ->
-				(x, y) -> {
+		SerializableSupplier<Function<OafImplSub, String>> actionPayloadIdFn = () -> OafImplRoot::getId;
+		SerializableSupplier<BiFunction<OafImplSubSub, OafImplSub, OafImplSubSub>> mergeAndGetFn = () -> (x, y) -> {
 			x.merge(y);
 			return x;
 		};

 		// when
-		List<OafImplSubSub> results =
-			PromoteActionPayloadFunctions.joinGraphTableWithActionPayloadAndMerge(
+		List<OafImplSubSub> results = PromoteActionPayloadFunctions
+			.joinGraphTableWithActionPayloadAndMerge(
 			rowDS,
 			actionPayloadDS,
 			rowIdFn,
@@ -197,7 +193,8 @@ public class PromoteActionPayloadFunctionsTest {
 		assertEquals(3, results.stream().filter(x -> x.getId().equals(id3)).count());
 		assertEquals(0, results.stream().filter(x -> x.getId().equals(id4)).count());

-		results.forEach(
+		results
+			.forEach(
 			result -> {
 				switch (result.getId()) {
 					case "id0":
@@ -224,31 +221,28 @@ public class PromoteActionPayloadFunctionsTest {
 		String id1 = "id1";
 		String id2 = "id2";
 		String id3 = "id3";
-		List<OafImplSubSub> rowData =
-			Arrays.asList(
+		List<OafImplSubSub> rowData = Arrays
+			.asList(
 			createOafImplSubSub(id1),
 			createOafImplSubSub(id2),
 			createOafImplSubSub(id2),
 			createOafImplSubSub(id3),
 			createOafImplSubSub(id3),
 			createOafImplSubSub(id3));
-		Dataset<OafImplSubSub> rowDS =
-			spark.createDataset(rowData, Encoders.bean(OafImplSubSub.class));
+		Dataset<OafImplSubSub> rowDS = spark.createDataset(rowData, Encoders.bean(OafImplSubSub.class));

 		SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn = () -> OafImplRoot::getId;
-		SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>> mergeAndGetFn =
-			() ->
-				(x, y) -> {
+		SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>> mergeAndGetFn = () -> (x,
+			y) -> {
 			x.merge(y);
 			return x;
 		};
 		SerializableSupplier<OafImplSubSub> zeroFn = OafImplSubSub::new;
-		SerializableSupplier<Function<OafImplSubSub, Boolean>> isNotZeroFn =
-			() -> x -> Objects.nonNull(x.getId());
+		SerializableSupplier<Function<OafImplSubSub, Boolean>> isNotZeroFn = () -> x -> Objects.nonNull(x.getId());

 		// when
-		List<OafImplSubSub> results =
-			PromoteActionPayloadFunctions.groupGraphTableByIdAndMerge(
+		List<OafImplSubSub> results = PromoteActionPayloadFunctions
+			.groupGraphTableByIdAndMerge(
 			rowDS, rowIdFn, mergeAndGetFn, zeroFn, isNotZeroFn, OafImplSubSub.class)
 			.collectAsList();

@@ -258,7 +252,8 @@ public class PromoteActionPayloadFunctionsTest {
 		assertEquals(1, results.stream().filter(x -> x.getId().equals(id2)).count());
 		assertEquals(1, results.stream().filter(x -> x.getId().equals(id3)).count());

-		results.forEach(
+		results
+			.forEach(
 			result -> {
 				switch (result.getId()) {
 					case "id1":
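The last test in this file pins down why groupGraphTableByIdAndMerge leaves exactly one result per id: rows sharing an id are folded together with the supplied merge function. A plain-Java analogue of that contract, with made-up row values:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.BinaryOperator;
import java.util.stream.Collectors;

public class GroupAndMergeDemo {

    public static void main(String[] args) {
        // Three rows share id3, two share id2, as in the test data above.
        List<String> rows = Arrays.asList("id1:a", "id2:b", "id2:c", "id3:d", "id3:e", "id3:f");
        // Merge function: keep the first row, append the later row's value.
        BinaryOperator<String> merge = (x, y) -> x + "|" + y.split(":")[1];
        Map<String, String> merged = rows
            .stream()
            .collect(Collectors.toMap(r -> r.split(":")[0], r -> r, merge));
        System.out.println(merged.size());     // 3 -- one merged row per id
        System.out.println(merged.get("id3")); // id3:d|e|f
    }
}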
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.collection;

 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -40,20 +41,22 @@ public class GenerateNativeStoreSparkJob {
 		final LongAccumulator totalItems,
 		final LongAccumulator invalidRecords) {

-		if (totalItems != null) totalItems.add(1);
+		if (totalItems != null)
+			totalItems.add(1);
 		try {
 			SAXReader reader = new SAXReader();
-			Document document =
-				reader.read(new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8)));
+			Document document = reader.read(new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8)));
 			Node node = document.selectSingleNode(xpath);
 			final String originalIdentifier = node.getText();
 			if (StringUtils.isBlank(originalIdentifier)) {
-				if (invalidRecords != null) invalidRecords.add(1);
+				if (invalidRecords != null)
+					invalidRecords.add(1);
 				return null;
 			}
 			return new MetadataRecord(originalIdentifier, encoding, provenance, input, dateOfCollection);
 		} catch (Throwable e) {
-			if (invalidRecords != null) invalidRecords.add(1);
+			if (invalidRecords != null)
+				invalidRecords.add(1);
 			e.printStackTrace();
 			return null;
 		}
@@ -61,18 +64,19 @@ public class GenerateNativeStoreSparkJob {

 	public static void main(String[] args) throws Exception {

-		final ArgumentApplicationParser parser =
-			new ArgumentApplicationParser(
-				IOUtils.toString(
-					GenerateNativeStoreSparkJob.class.getResourceAsStream(
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					GenerateNativeStoreSparkJob.class
+						.getResourceAsStream(
 						"/eu/dnetlib/dhp/collection/collection_input_parameters.json")));
 		parser.parseArgument(args);
 		final ObjectMapper jsonMapper = new ObjectMapper();
 		final Provenance provenance = jsonMapper.readValue(parser.get("provenance"), Provenance.class);
 		final long dateOfCollection = new Long(parser.get("dateOfCollection"));

-		final SparkSession spark =
-			SparkSession.builder()
+		final SparkSession spark = SparkSession
+			.builder()
 			.appName("GenerateNativeStoreSparkJob")
 			.master(parser.get("master"))
 			.getOrCreate();
@@ -80,20 +84,18 @@ public class GenerateNativeStoreSparkJob {
 		final Map<String, String> ongoingMap = new HashMap<>();
 		final Map<String, String> reportMap = new HashMap<>();

-		final boolean test =
-			parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));
+		final boolean test = parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));

 		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

-		final JavaPairRDD<IntWritable, Text> inputRDD =
-			sc.sequenceFile(parser.get("input"), IntWritable.class, Text.class);
+		final JavaPairRDD<IntWritable, Text> inputRDD = sc
+			.sequenceFile(parser.get("input"), IntWritable.class, Text.class);

 		final LongAccumulator totalItems = sc.sc().longAccumulator("TotalItems");

 		final LongAccumulator invalidRecords = sc.sc().longAccumulator("InvalidRecords");

-		final MessageManager manager =
-			new MessageManager(
+		final MessageManager manager = new MessageManager(
 			parser.get("rabbitHost"),
 			parser.get("rabbitUser"),
 			parser.get("rabbitPassword"),
@@ -101,11 +103,9 @@ public class GenerateNativeStoreSparkJob {
 			false,
 			null);

-		final JavaRDD<MetadataRecord> mappeRDD =
-			inputRDD
+		final JavaRDD<MetadataRecord> mappeRDD = inputRDD
 			.map(
-				item ->
-					parseRecord(
+				item -> parseRecord(
 					item._2().toString(),
 					parser.get("xpath"),
 					parser.get("encoding"),
@@ -118,7 +118,8 @@ public class GenerateNativeStoreSparkJob {

 		ongoingMap.put("ongoing", "0");
 		if (!test) {
-			manager.sendMessage(
+			manager
+				.sendMessage(
 				new Message(
 					parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap),
 				parser.get("rabbitOngoingQueue"),
@@ -132,7 +133,8 @@ public class GenerateNativeStoreSparkJob {
 		mdStoreRecords.add(mdstore.count());
 		ongoingMap.put("ongoing", "" + totalItems.value());
 		if (!test) {
-			manager.sendMessage(
+			manager
+				.sendMessage(
 				new Message(
 					parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap),
 				parser.get("rabbitOngoingQueue"),
@@ -144,7 +146,8 @@ public class GenerateNativeStoreSparkJob {
 		reportMap.put("invalidRecords", "" + invalidRecords.value());
 		reportMap.put("mdStoreSize", "" + mdStoreRecords.value());
 		if (!test) {
-			manager.sendMessage(
+			manager
+				.sendMessage(
 				new Message(parser.get("workflowId"), "Collection", MessageType.REPORT, reportMap),
 				parser.get("rabbitReportQueue"),
 				true,
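Another convention change visible in this file: single-statement if bodies that used to share a line with the condition now move to their own line, still without braces. A tiny compilable sketch of the before/after shape (names are illustrative):

public class SingleLineIfDemo {

    private static long total = 0;

    static void count(Object item) {
        // Before the formatter switch: condition and body on one line.
        // if (item != null) total++;

        // After: the body is wrapped onto its own line.
        if (item != null)
            total++;
    }

    public static void main(String[] args) {
        count("x");
        count(null);
        System.out.println(total); // 1
    }
}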
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.collection.plugin;

 import eu.dnetlib.collector.worker.model.ApiDescriptor;
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.collection.plugin.oai;

 import com.google.common.base.Splitter;
@@ -33,7 +34,8 @@ public class OaiCollectorPlugin implements CollectorPlugin {

 		final List<String> sets = new ArrayList<>();
 		if (setParam != null) {
-			sets.addAll(
+			sets
+				.addAll(
 				Lists.newArrayList(Splitter.on(",").omitEmptyStrings().trimResults().split(setParam)));
 		}
 		if (sets.isEmpty()) {
@@ -57,15 +59,15 @@ public class OaiCollectorPlugin implements CollectorPlugin {
 			throw new DnetCollectorException("Invalid date (YYYY-MM-DD): " + untilDate);
 		}

-		final Iterator<Iterator<String>> iters =
-			sets.stream()
+		final Iterator<Iterator<String>> iters = sets
+			.stream()
 			.map(
-				set ->
-					getOaiIteratorFactory()
+				set -> getOaiIteratorFactory()
 					.newIterator(baseUrl, mdFormat, set, fromDate, untilDate))
 			.iterator();

-		return StreamSupport.stream(
+		return StreamSupport
+			.stream(
 			Spliterators.spliteratorUnknownSize(Iterators.concat(iters), Spliterator.ORDERED), false);
 	}
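The plugin above builds one iterator per OAI set, concatenates them lazily with Guava's Iterators.concat, and exposes the result as an ordered Stream. A minimal sketch of that flattening trick with made-up record contents:

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import com.google.common.collect.Iterators;

public class IteratorFlatteningDemo {

    public static void main(String[] args) {
        // One iterator per "set", as in the collector plugin above.
        Iterator<Iterator<String>> iters = Arrays
            .<Iterator<String>> asList(
                Arrays.asList("rec1", "rec2").iterator(),
                Arrays.asList("rec3").iterator())
            .iterator();

        // Concatenate lazily and wrap the combined iterator as a Stream.
        Stream<String> records = StreamSupport
            .stream(
                Spliterators.spliteratorUnknownSize(Iterators.concat(iters), Spliterator.ORDERED), false);

        List<String> collected = records.collect(Collectors.toList());
        System.out.println(collected); // [rec1, rec2, rec3]
    }
}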
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.collection.plugin.oai;

 import eu.dnetlib.dhp.collection.worker.DnetCollectorException;
@@ -86,12 +87,12 @@ public class OaiIterator implements Iterator<String> {
 	}

 	@Override
-	public void remove() {}
+	public void remove() {
+	}

 	private String firstPage() throws DnetCollectorException {
 		try {
-			String url =
-				baseUrl + "?verb=ListRecords&metadataPrefix=" + URLEncoder.encode(mdFormat, "UTF-8");
+			String url = baseUrl + "?verb=ListRecords&metadataPrefix=" + URLEncoder.encode(mdFormat, "UTF-8");
 			if (set != null && !set.isEmpty()) {
 				url += "&set=" + URLEncoder.encode(set, "UTF-8");
 			}
@@ -154,8 +155,7 @@ public class OaiIterator implements Iterator<String> {
 			}
 		}

-		final Node errorNode =
-			doc.selectSingleNode("/*[local-name()='OAI-PMH']/*[local-name()='error']");
+		final Node errorNode = doc.selectSingleNode("/*[local-name()='OAI-PMH']/*[local-name()='error']");
 		if (errorNode != null) {
 			final String code = errorNode.valueOf("@code");
 			if ("noRecordsMatch".equalsIgnoreCase(code.trim())) {
@@ -166,8 +166,7 @@ public class OaiIterator implements Iterator<String> {
 			}
 		}

-		for (final Object o :
-			doc.selectNodes("//*[local-name()='ListRecords']/*[local-name()='record']")) {
+		for (final Object o : doc.selectNodes("//*[local-name()='ListRecords']/*[local-name()='record']")) {
 			queue.add(((Node) o).asXML());
 		}
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.collection.plugin.oai;

 import eu.dnetlib.dhp.collection.worker.utils.HttpConnector;
@@ -17,7 +18,8 @@ public class OaiIteratorFactory {
 	}

 	private HttpConnector getHttpConnector() {
-		if (httpConnector == null) httpConnector = new HttpConnector();
+		if (httpConnector == null)
+			httpConnector = new HttpConnector();
 		return httpConnector;
 	}
 }
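getHttpConnector() above is a plain lazy-initialization accessor: the field is created on first use and reused afterwards. A self-contained sketch of the same shape (ExpensiveResource is hypothetical; like the original, this is not thread-safe, which is presumably fine for the factory's single-threaded use):

public class LazyInitDemo {

    private ExpensiveResource resource;

    // Same lazy-init shape as getHttpConnector(): create on first access,
    // return the cached instance on every later call.
    private ExpensiveResource getResource() {
        if (resource == null)
            resource = new ExpensiveResource();
        return resource;
    }

    static class ExpensiveResource {
        ExpensiveResource() {
            System.out.println("created once");
        }
    }

    public static void main(String[] args) {
        LazyInitDemo demo = new LazyInitDemo();
        demo.getResource();
        demo.getResource(); // "created once" is printed a single time
    }
}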
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.collection.worker;

 public class DnetCollectorException extends Exception {
@ -1,3 +1,4 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.collection.worker;
|
package eu.dnetlib.dhp.collection.worker;
|
||||||
|
|
||||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
@ -45,8 +46,7 @@ public class DnetCollectorWorker {
|
||||||
public void collect() throws DnetCollectorException {
|
public void collect() throws DnetCollectorException {
|
||||||
try {
|
try {
|
||||||
final ObjectMapper jsonMapper = new ObjectMapper();
|
final ObjectMapper jsonMapper = new ObjectMapper();
|
||||||
final ApiDescriptor api =
|
final ApiDescriptor api = jsonMapper.readValue(argumentParser.get("apidescriptor"), ApiDescriptor.class);
|
||||||
jsonMapper.readValue(argumentParser.get("apidescriptor"), ApiDescriptor.class);
|
|
||||||
|
|
||||||
final CollectorPlugin plugin = collectorPluginFactory.getPluginByProtocol(api.getProtocol());
|
final CollectorPlugin plugin = collectorPluginFactory.getPluginByProtocol(api.getProtocol());
|
||||||
|
|
||||||
|
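collect() starts by deserializing the apidescriptor argument with Jackson's ObjectMapper.readValue, and the resulting protocol field drives plugin selection. A minimal sketch of that step with a hypothetical descriptor class and payload (the field names here are assumptions, not the project's actual ApiDescriptor):

import com.fasterxml.jackson.databind.ObjectMapper;

public class ReadValueSketch {
	// Hypothetical stand-in for the project's ApiDescriptor; real field names may differ
	public static class Descriptor {
		public String protocol;
		public String baseUrl;
	}

	public static void main(String[] args) throws Exception {
		final String json = "{\"protocol\":\"oai\",\"baseUrl\":\"https://example.org/oai\"}";
		final Descriptor api = new ObjectMapper().readValue(json, Descriptor.class);
		// The protocol value is what the worker hands to getPluginByProtocol(...)
		System.out.println(api.protocol + " @ " + api.baseUrl);
	}
}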
@@ -71,8 +71,8 @@ public class DnetCollectorWorker {
 			final Map<String, String> ongoingMap = new HashMap<>();
 			final Map<String, String> reportMap = new HashMap<>();
 			final AtomicInteger counter = new AtomicInteger(0);
-			try (SequenceFile.Writer writer =
-				SequenceFile.createWriter(
+			try (SequenceFile.Writer writer = SequenceFile
+				.createWriter(
 					conf,
 					SequenceFile.Writer.file(hdfswritepath),
 					SequenceFile.Writer.keyClass(IntWritable.class),
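The try-with-resources above opens a Hadoop SequenceFile.Writer via the option-based SequenceFile.createWriter(conf, ...) factory. A minimal sketch of writing counter-keyed records the same way (the local output path is hypothetical; the real worker writes collected records to an HDFS path):

import java.util.concurrent.atomic.AtomicInteger;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class SequenceFileSketch {
	public static void main(String[] args) throws Exception {
		final Configuration conf = new Configuration();
		final Path outPath = new Path("/tmp/records.seq"); // hypothetical output path
		final AtomicInteger counter = new AtomicInteger(0);

		try (SequenceFile.Writer writer = SequenceFile
			.createWriter(
				conf,
				SequenceFile.Writer.file(outPath),
				SequenceFile.Writer.keyClass(IntWritable.class),
				SequenceFile.Writer.valueClass(Text.class))) {
			// key = running counter, value = record body, mirroring the worker's write loop
			writer.append(new IntWritable(counter.getAndIncrement()), new Text("<record/>"));
		}
	}
}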
@@ -88,9 +88,11 @@ public class DnetCollectorWorker {
 					if (counter.get() % 10 == 0) {
 						try {
 							ongoingMap.put("ongoing", "" + counter.get());
-							log.debug(
+							log
+								.debug(
 								"Sending message: "
-									+ manager.sendMessage(
+									+ manager
+										.sendMessage(
 										new Message(
 											argumentParser.get("workflowId"),
 											"Collection",
@@ -111,14 +113,16 @@ public class DnetCollectorWorker {
 				});
 			}
 			ongoingMap.put("ongoing", "" + counter.get());
-			manager.sendMessage(
+			manager
+				.sendMessage(
 				new Message(
 					argumentParser.get("workflowId"), "Collection", MessageType.ONGOING, ongoingMap),
 				argumentParser.get("rabbitOngoingQueue"),
 				true,
 				false);
 			reportMap.put("collected", "" + counter.get());
-			manager.sendMessage(
+			manager
+				.sendMessage(
 				new Message(
 					argumentParser.get("workflowId"), "Collection", MessageType.REPORT, reportMap),
 				argumentParser.get("rabbitOngoingQueue"),
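The two hunks above follow one reporting pattern: an ONGOING message is sent every 10 collected records, once more after the loop, and then a final REPORT message with the total. (Note that the diff sends the REPORT message to rabbitOngoingQueue as well; this formatting-only commit preserves that as-is.) A hedged sketch of the throttling logic alone, with the messaging reduced to a placeholder rather than the MessageManager call shown in the diff:

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

public class ProgressReportSketch {
	public static void main(String[] args) {
		final AtomicInteger counter = new AtomicInteger(0);
		final Map<String, String> ongoingMap = new HashMap<>();

		for (int i = 0; i < 25; i++) {
			counter.incrementAndGet();
			// Report every 10 records, as in the worker's write loop
			if (counter.get() % 10 == 0) {
				ongoingMap.put("ongoing", "" + counter.get());
				System.out.println("ONGOING " + ongoingMap); // placeholder for manager.sendMessage(...)
			}
		}
		// Final ONGOING and REPORT messages after the loop
		ongoingMap.put("ongoing", "" + counter.get());
		System.out.println("ONGOING " + ongoingMap);

		final Map<String, String> reportMap = new HashMap<>();
		reportMap.put("collected", "" + counter.get());
		System.out.println("REPORT " + reportMap); // placeholder for manager.sendMessage(...)
	}
}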
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.collection.worker;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
@@ -8,9 +9,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * DnetCollectortWorkerApplication is the main class responsible to start the Dnet Collection into
- * HDFS. This module will be executed on the hadoop cluster and taking in input some parameters that
- * tells it which is the right collector plugin to use and where store the data into HDFS path
+ * DnetCollectortWorkerApplication is the main class responsible to start the Dnet Collection into HDFS. This module
+ * will be executed on the hadoop cluster and taking in input some parameters that tells it which is the right collector
+ * plugin to use and where store the data into HDFS path
 *
 * @author Sandro La Bruzzo
 */
@@ -25,24 +26,23 @@ public class DnetCollectorWorkerApplication {
 	/** @param args */
 	public static void main(final String[] args) throws Exception {
 
-		argumentParser =
-			new ArgumentApplicationParser(
-				IOUtils.toString(
-					DnetCollectorWorker.class.getResourceAsStream(
-						"/eu/dnetlib/collector/worker/collector_parameter.json")));
+		argumentParser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					DnetCollectorWorker.class
+						.getResourceAsStream(
+							"/eu/dnetlib/collector/worker/collector_parameter.json")));
 		argumentParser.parseArgument(args);
 		log.info("hdfsPath =" + argumentParser.get("hdfsPath"));
 		log.info("json = " + argumentParser.get("apidescriptor"));
-		final MessageManager manager =
-			new MessageManager(
+		final MessageManager manager = new MessageManager(
 			argumentParser.get("rabbitHost"),
 			argumentParser.get("rabbitUser"),
 			argumentParser.get("rabbitPassword"),
 			false,
 			false,
 			null);
-		final DnetCollectorWorker worker =
-			new DnetCollectorWorker(collectorPluginFactory, argumentParser, manager);
+		final DnetCollectorWorker worker = new DnetCollectorWorker(collectorPluginFactory, argumentParser, manager);
 		worker.collect();
 	}
 }
Some files were not shown because too many files have changed in this diff.