forked from D-Net/dnet-hadoop
resolved conflicts
This commit is contained in:
commit
f7695e833c
|
@ -27,11 +27,9 @@ public class GenerateOoziePropertiesMojo extends AbstractMojo {
|
||||||
if (System.getProperties().containsKey(PROPERTY_NAME_WF_SOURCE_DIR)
|
if (System.getProperties().containsKey(PROPERTY_NAME_WF_SOURCE_DIR)
|
||||||
&& !System.getProperties().containsKey(PROPERTY_NAME_SANDBOX_NAME)) {
|
&& !System.getProperties().containsKey(PROPERTY_NAME_SANDBOX_NAME)) {
|
||||||
String generatedSandboxName =
|
String generatedSandboxName =
|
||||||
generateSandboxName(
|
generateSandboxName(System.getProperties().getProperty(PROPERTY_NAME_WF_SOURCE_DIR));
|
||||||
System.getProperties().getProperty(PROPERTY_NAME_WF_SOURCE_DIR));
|
|
||||||
if (generatedSandboxName != null) {
|
if (generatedSandboxName != null) {
|
||||||
System.getProperties()
|
System.getProperties().setProperty(PROPERTY_NAME_SANDBOX_NAME, generatedSandboxName);
|
||||||
.setProperty(PROPERTY_NAME_SANDBOX_NAME, generatedSandboxName);
|
|
||||||
} else {
|
} else {
|
||||||
System.out.println(
|
System.out.println(
|
||||||
"unable to generate sandbox name from path: "
|
"unable to generate sandbox name from path: "
|
||||||
|
|
|
@ -70,16 +70,16 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
|
||||||
protected File outputFile;
|
protected File outputFile;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* If true, the plugin will silently ignore any non-existent properties files, and the build
|
* If true, the plugin will silently ignore any non-existent properties files, and the build will
|
||||||
* will continue
|
* continue
|
||||||
*
|
*
|
||||||
* @parameter property="properties.quiet" default-value="true"
|
* @parameter property="properties.quiet" default-value="true"
|
||||||
*/
|
*/
|
||||||
private boolean quiet;
|
private boolean quiet;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Comma separated list of characters to escape when writing property values. cr=carriage
|
* Comma separated list of characters to escape when writing property values. cr=carriage return,
|
||||||
* return, lf=linefeed, tab=tab. Any other values are taken literally.
|
* lf=linefeed, tab=tab. Any other values are taken literally.
|
||||||
*
|
*
|
||||||
* @parameter default-value="cr,lf,tab" property="properties.escapeChars"
|
* @parameter default-value="cr,lf,tab" property="properties.escapeChars"
|
||||||
*/
|
*/
|
||||||
|
@ -117,7 +117,8 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
|
||||||
*/
|
*/
|
||||||
private String include;
|
private String include;
|
||||||
|
|
||||||
/* (non-Javadoc)
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
* @see org.apache.maven.plugin.AbstractMojo#execute()
|
* @see org.apache.maven.plugin.AbstractMojo#execute()
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
|
@ -437,8 +438,7 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
|
||||||
public void setIncludePropertyKeysFromFiles(String[] includePropertyKeysFromFiles) {
|
public void setIncludePropertyKeysFromFiles(String[] includePropertyKeysFromFiles) {
|
||||||
if (includePropertyKeysFromFiles != null) {
|
if (includePropertyKeysFromFiles != null) {
|
||||||
this.includePropertyKeysFromFiles =
|
this.includePropertyKeysFromFiles =
|
||||||
Arrays.copyOf(
|
Arrays.copyOf(includePropertyKeysFromFiles, includePropertyKeysFromFiles.length);
|
||||||
includePropertyKeysFromFiles, includePropertyKeysFromFiles.length);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -81,8 +81,7 @@ public class WritePredefinedProjectPropertiesTest {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExecuteWithProjectPropertiesExclusion(@TempDir File testFolder)
|
public void testExecuteWithProjectPropertiesExclusion(@TempDir File testFolder) throws Exception {
|
||||||
throws Exception {
|
|
||||||
// given
|
// given
|
||||||
String key = "projectPropertyKey";
|
String key = "projectPropertyKey";
|
||||||
String value = "projectPropertyValue";
|
String value = "projectPropertyValue";
|
||||||
|
@ -106,8 +105,7 @@ public class WritePredefinedProjectPropertiesTest {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExecuteWithProjectPropertiesInclusion(@TempDir File testFolder)
|
public void testExecuteWithProjectPropertiesInclusion(@TempDir File testFolder) throws Exception {
|
||||||
throws Exception {
|
|
||||||
// given
|
// given
|
||||||
String key = "projectPropertyKey";
|
String key = "projectPropertyKey";
|
||||||
String value = "projectPropertyValue";
|
String value = "projectPropertyValue";
|
||||||
|
@ -131,8 +129,7 @@ public class WritePredefinedProjectPropertiesTest {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExecuteIncludingPropertyKeysFromFile(@TempDir File testFolder)
|
public void testExecuteIncludingPropertyKeysFromFile(@TempDir File testFolder) throws Exception {
|
||||||
throws Exception {
|
|
||||||
// given
|
// given
|
||||||
String key = "projectPropertyKey";
|
String key = "projectPropertyKey";
|
||||||
String value = "projectPropertyValue";
|
String value = "projectPropertyValue";
|
||||||
|
@ -148,8 +145,7 @@ public class WritePredefinedProjectPropertiesTest {
|
||||||
includedProperties.setProperty(includedKey, "irrelevantValue");
|
includedProperties.setProperty(includedKey, "irrelevantValue");
|
||||||
includedProperties.store(new FileWriter(includedPropertiesFile), null);
|
includedProperties.store(new FileWriter(includedPropertiesFile), null);
|
||||||
|
|
||||||
mojo.setIncludePropertyKeysFromFiles(
|
mojo.setIncludePropertyKeysFromFiles(new String[] {includedPropertiesFile.getAbsolutePath()});
|
||||||
new String[] {includedPropertiesFile.getAbsolutePath()});
|
|
||||||
|
|
||||||
// execute
|
// execute
|
||||||
mojo.execute();
|
mojo.execute();
|
||||||
|
@ -225,8 +221,7 @@ public class WritePredefinedProjectPropertiesTest {
|
||||||
includedProperties.setProperty(includedKey, "irrelevantValue");
|
includedProperties.setProperty(includedKey, "irrelevantValue");
|
||||||
includedProperties.storeToXML(new FileOutputStream(includedPropertiesFile), null);
|
includedProperties.storeToXML(new FileOutputStream(includedPropertiesFile), null);
|
||||||
|
|
||||||
mojo.setIncludePropertyKeysFromFiles(
|
mojo.setIncludePropertyKeysFromFiles(new String[] {includedPropertiesFile.getAbsolutePath()});
|
||||||
new String[] {includedPropertiesFile.getAbsolutePath()});
|
|
||||||
|
|
||||||
// execute
|
// execute
|
||||||
mojo.execute();
|
mojo.execute();
|
||||||
|
@ -257,8 +252,7 @@ public class WritePredefinedProjectPropertiesTest {
|
||||||
includedProperties.setProperty(includedKey, "irrelevantValue");
|
includedProperties.setProperty(includedKey, "irrelevantValue");
|
||||||
includedProperties.store(new FileOutputStream(includedPropertiesFile), null);
|
includedProperties.store(new FileOutputStream(includedPropertiesFile), null);
|
||||||
|
|
||||||
mojo.setIncludePropertyKeysFromFiles(
|
mojo.setIncludePropertyKeysFromFiles(new String[] {includedPropertiesFile.getAbsolutePath()});
|
||||||
new String[] {includedPropertiesFile.getAbsolutePath()});
|
|
||||||
|
|
||||||
// execute
|
// execute
|
||||||
Assertions.assertThrows(MojoExecutionException.class, () -> mojo.execute());
|
Assertions.assertThrows(MojoExecutionException.class, () -> mojo.execute());
|
||||||
|
|
|
@ -0,0 +1,18 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<groupId>eu.dnetlib.dhp</groupId>
|
||||||
|
<artifactId>dhp-code-style</artifactId>
|
||||||
|
<version>1.1.7-SNAPSHOT</version>
|
||||||
|
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<properties>
|
||||||
|
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||||
|
</properties>
|
||||||
|
|
||||||
|
</project>
|
|
@ -0,0 +1,252 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<profiles version="10">
|
||||||
|
<profile kind="CodeFormatterProfile" name="Android" version="10">
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="120"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="100"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="tab"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="false"/>
|
||||||
|
</profile>
|
||||||
|
</profiles>
@ -0,0 +1,727 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<profiles version="18">
|
||||||
|
<profile kind="CodeFormatterProfile" name="Android_custom" version="18">
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_logical_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_default" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_with_spaces" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_before_code_block" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_switch_case_expressions" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_method_body" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_case" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_multiplicative_operator" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_logical_operator" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_annotation_declaration_on_one_line" value="one_line_never"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_multiplicative_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="120"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_method_body_on_one_line" value="one_line_never"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_loop_body_block_on_one_line" value="one_line_never"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_abstract_method" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_enum_constant_declaration_on_one_line" value="one_line_never"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_variable_declarations_on_columns" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_type_declaration_on_one_line" value="one_line_never"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_additive_operator" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_relational_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiplicative_operator" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_anonymous_type_declaration_on_one_line" value="one_line_never"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_switch_case_expressions" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_shift_operator" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_lambda_body" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_code_block" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_bitwise_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_parameters" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="32"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_loops" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_simple_for_body_on_same_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_relational_operator" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_additive_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_string_concatenation" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.text_block_indentation" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_module_statements" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_after_code_block" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_if_then_body_block_on_one_line" value="one_line_never"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_assignment_statements_on_columns" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_default" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_between_different_tags" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression_chain" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_additive_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_conditional_operator" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_shift_operator" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines" value="2147483647"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_bitwise_operator" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_code_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_bitwise_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_assignment_operator" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_not_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.keep_lambda_body_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_arguments" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_logical_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_bitwise_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_relational_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_tag_description" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_string_concatenation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_last_class_body_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_while_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_logical_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_shift_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_statement_group_in_switch" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_shift_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_do_while_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_enum_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_multiplicative_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_additive_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_getter_setter_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_string_concatenation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="tab"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_relational_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_string_concatenation" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
</profile>
<profile kind="CodeFormatterProfile" name="Dnet" version="18">
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_logical_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_default" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.align_with_spaces" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_before_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_switch_case_expressions" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_method_body" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_multiplicative_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_logical_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_annotation_declaration_on_one_line" value="one_line_if_empty"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_multiplicative_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="140"/>
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_method_body_on_one_line" value="one_line_if_empty"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_loop_body_block_on_one_line" value="one_line_if_empty"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_abstract_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.keep_enum_constant_declaration_on_one_line" value="one_line_if_empty"/>
<setting id="org.eclipse.jdt.core.formatter.align_variable_declarations_on_columns" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_type_declaration_on_one_line" value="one_line_if_empty"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_additive_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_relational_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiplicative_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.keep_anonymous_type_declaration_on_one_line" value="one_line_if_empty"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_switch_case_expressions" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_shift_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_lambda_body" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_bitwise_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_parameters" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_loops" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_for_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_relational_operator" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_additive_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_string_concatenation" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.text_block_indentation" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_module_statements" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_after_code_block" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_if_then_body_block_on_one_line" value="one_line_if_empty"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_assignment_statements_on_columns" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_default" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_between_different_tags" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression_chain" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_additive_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_conditional_operator" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_shift_operator" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines" value="2147483647"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_bitwise_operator" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_code_block_on_one_line" value="one_line_if_empty"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_bitwise_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_assignment_operator" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_not_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_lambda_body_block_on_one_line" value="one_line_if_empty"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_arguments" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_case" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_logical_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_bitwise_operator" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_relational_operator" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.indent_tag_description" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_string_concatenation" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_last_class_body_declaration" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_simple_while_body_on_same_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_logical_operator" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_shift_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_statement_group_in_switch" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration" value="common_lines"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_shift_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_simple_do_while_body_on_same_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_enum_declaration_on_one_line" value="one_line_if_empty"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_multiplicative_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_additive_operator" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_simple_getter_setter_on_one_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_string_concatenation" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_code_block" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="tab"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_relational_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_string_concatenation" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="160"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
|
||||||
|
</profile>
|
||||||
|
</profiles>
|
|
@ -0,0 +1,337 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<profiles version="13">
|
||||||
|
<profile kind="CodeFormatterProfile" name="GoogleStyle" version="13">
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_cascading_method_invocation_with_arguments.count_dependent" value="16|-1|16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_prefer_two_fragments" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_comment_inline_tags" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_local_variable_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_parameter" value="1040"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_type.count_dependent" value="1585|-1|1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields.count_dependent" value="16|-1|16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression.count_dependent" value="16|4|80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration.count_dependent" value="16|4|48"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration.count_dependent" value="16|4|49"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_cascading_method_invocation_with_arguments" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.compiler.source" value="1.7"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration.count_dependent" value="16|4|48"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_local_variable_annotation" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants.count_dependent" value="16|5|48"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="100"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation.count_dependent" value="16|4|48"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_package" value="1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_type_annotation" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_field_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_generic_type_arguments" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment_new_line_at_start_of_html_paragraph" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comment_prefix" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_parameter_annotation" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_method" value="1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="2"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation.count_dependent" value="16|5|80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_parameter.count_dependent" value="1040|-1|1040"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_package.count_dependent" value="1585|-1|1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.force_if_else_statement_brace" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="3"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_package_annotation" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation.count_dependent" value="16|-1|16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_type" value="1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.7"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_new_anonymous_class" value="20"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_local_variable.count_dependent" value="1585|-1|1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_field.count_dependent" value="1585|-1|1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration.count_dependent" value="16|5|80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant.count_dependent" value="16|-1|16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="100"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="2"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_field" value="1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer.count_dependent" value="16|5|80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.7"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration.count_dependent" value="16|4|48"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_method.count_dependent" value="1585|-1|1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression.count_dependent" value="16|-1|16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_member_annotation" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_local_variable" value="1585"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call.count_dependent" value="16|5|80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_generic_type_arguments.count_dependent" value="16|-1|16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression.count_dependent" value="16|5|80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration.count_dependent" value="16|5|80"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.alignment_for_for_statement" value="16"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
|
||||||
|
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
|
||||||
|
</profile>
|
||||||
|
</profiles>
|
|
@ -9,6 +9,7 @@
|
||||||
<artifactId>dhp-build</artifactId>
|
<artifactId>dhp-build</artifactId>
|
||||||
<packaging>pom</packaging>
|
<packaging>pom</packaging>
|
||||||
<modules>
|
<modules>
|
||||||
|
<module>dhp-code-style</module>
|
||||||
<module>dhp-build-assembly-resources</module>
|
<module>dhp-build-assembly-resources</module>
|
||||||
<module>dhp-build-properties-maven-plugin</module>
|
<module>dhp-build-properties-maven-plugin</module>
|
||||||
</modules>
|
</modules>
|
||||||
|
|
|
@ -35,9 +35,7 @@ public class ArgumentApplicationParser implements Serializable {
|
||||||
Arrays.stream(configuration)
|
Arrays.stream(configuration)
|
||||||
.map(
|
.map(
|
||||||
conf -> {
|
conf -> {
|
||||||
final Option o =
|
final Option o = new Option(conf.getParamName(), true, conf.getParamDescription());
|
||||||
new Option(
|
|
||||||
conf.getParamName(), true, conf.getParamDescription());
|
|
||||||
o.setLongOpt(conf.getParamLongName());
|
o.setLongOpt(conf.getParamLongName());
|
||||||
o.setRequired(conf.isParamRequired());
|
o.setRequired(conf.isParamRequired());
|
||||||
if (conf.isCompressed()) {
|
if (conf.isCompressed()) {
|
||||||
|
|
|
@ -12,23 +12,18 @@ public class SparkSessionSupport {
|
||||||
private SparkSessionSupport() {}
|
private SparkSessionSupport() {}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Runs a given function using SparkSession created using default builder and supplied
|
* Runs a given function using SparkSession created using default builder and supplied SparkConf.
|
||||||
* SparkConf. Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession
|
* Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession created
|
||||||
* created externally.
|
* externally.
|
||||||
*
|
*
|
||||||
* @param conf SparkConf instance
|
* @param conf SparkConf instance
|
||||||
* @param isSparkSessionManaged When true will stop SparkSession
|
* @param isSparkSessionManaged When true will stop SparkSession
|
||||||
* @param fn Consumer to be applied to constructed SparkSession
|
* @param fn Consumer to be applied to constructed SparkSession
|
||||||
*/
|
*/
|
||||||
public static void runWithSparkSession(
|
public static void runWithSparkSession(
|
||||||
SparkConf conf,
|
SparkConf conf, Boolean isSparkSessionManaged, ThrowingConsumer<SparkSession, Exception> fn) {
|
||||||
Boolean isSparkSessionManaged,
|
|
||||||
ThrowingConsumer<SparkSession, Exception> fn) {
|
|
||||||
runWithSparkSession(
|
runWithSparkSession(
|
||||||
c -> SparkSession.builder().config(c).getOrCreate(),
|
c -> SparkSession.builder().config(c).getOrCreate(), conf, isSparkSessionManaged, fn);
|
||||||
conf,
|
|
||||||
isSparkSessionManaged,
|
|
||||||
fn);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -41,9 +36,7 @@ public class SparkSessionSupport {
|
||||||
* @param fn Consumer to be applied to constructed SparkSession
|
* @param fn Consumer to be applied to constructed SparkSession
|
||||||
*/
|
*/
|
||||||
public static void runWithSparkHiveSession(
|
public static void runWithSparkHiveSession(
|
||||||
SparkConf conf,
|
SparkConf conf, Boolean isSparkSessionManaged, ThrowingConsumer<SparkSession, Exception> fn) {
|
||||||
Boolean isSparkSessionManaged,
|
|
||||||
ThrowingConsumer<SparkSession, Exception> fn) {
|
|
||||||
runWithSparkSession(
|
runWithSparkSession(
|
||||||
c -> SparkSession.builder().config(c).enableHiveSupport().getOrCreate(),
|
c -> SparkSession.builder().config(c).enableHiveSupport().getOrCreate(),
|
||||||
conf,
|
conf,
|
||||||
|
@ -52,9 +45,9 @@ public class SparkSessionSupport {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Runs a given function using SparkSession created using supplied builder and supplied
|
* Runs a given function using SparkSession created using supplied builder and supplied SparkConf.
|
||||||
* SparkConf. Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession
|
* Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession created
|
||||||
* created externally.
|
* externally.
|
||||||
*
|
*
|
||||||
* @param sparkSessionBuilder Builder of SparkSession
|
* @param sparkSessionBuilder Builder of SparkSession
|
||||||
* @param conf SparkConf instance
|
* @param conf SparkConf instance
|
||||||
|
|
|
@ -32,8 +32,7 @@ public class VtdUtilityParser {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Map<String, String> getAttributes(
|
private static Map<String, String> getAttributes(final VTDNav vn, final List<String> attributes) {
|
||||||
final VTDNav vn, final List<String> attributes) {
|
|
||||||
final Map<String, String> currentAttributes = new HashMap<>();
|
final Map<String, String> currentAttributes = new HashMap<>();
|
||||||
if (attributes != null) {
|
if (attributes != null) {
|
||||||
|
|
||||||
|
|
|
@ -14,8 +14,7 @@ public abstract class AbstractExtensionFunction extends ExtensionFunctionDefinit
|
||||||
|
|
||||||
public abstract String getName();
|
public abstract String getName();
|
||||||
|
|
||||||
public abstract Sequence doCall(XPathContext context, Sequence[] arguments)
|
public abstract Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException;
|
||||||
throws XPathException;
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public StructuredQName getFunctionQName() {
|
public StructuredQName getFunctionQName() {
|
||||||
|
|
|
@ -24,8 +24,7 @@ public class PickFirst extends AbstractExtensionFunction {
|
||||||
final String s1 = getValue(arguments[0]);
|
final String s1 = getValue(arguments[0]);
|
||||||
final String s2 = getValue(arguments[1]);
|
final String s2 = getValue(arguments[1]);
|
||||||
|
|
||||||
return new StringValue(
|
return new StringValue(StringUtils.isNotBlank(s1) ? s1 : StringUtils.isNotBlank(s2) ? s2 : "");
|
||||||
StringUtils.isNotBlank(s1) ? s1 : StringUtils.isNotBlank(s2) ? s2 : "");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private String getValue(final Sequence arg) throws XPathException {
|
private String getValue(final Sequence arg) throws XPathException {
|
||||||
|
|
|
@ -89,7 +89,8 @@ public class MessageManager {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void close() throws IOException {
|
public void close() throws IOException {
|
||||||
channels.values()
|
channels
|
||||||
|
.values()
|
||||||
.forEach(
|
.forEach(
|
||||||
ch -> {
|
ch -> {
|
||||||
try {
|
try {
|
||||||
|
@ -125,8 +126,7 @@ public class MessageManager {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void startConsumingMessage(
|
public void startConsumingMessage(
|
||||||
final String queueName, final boolean durable, final boolean autodelete)
|
final String queueName, final boolean durable, final boolean autodelete) throws Exception {
|
||||||
throws Exception {
|
|
||||||
|
|
||||||
Channel channel = createChannel(createConnection(), queueName, durable, autodelete);
|
Channel channel = createChannel(createConnection(), queueName, durable, autodelete);
|
||||||
channel.basicConsume(queueName, false, new MessageConsumer(channel, queueMessages));
|
channel.basicConsume(queueName, false, new MessageConsumer(channel, queueMessages));
|
||||||
|
|
|
@ -12,8 +12,7 @@ public class ArgumentApplicationParserTest {
|
||||||
public void testParseParameter() throws Exception {
|
public void testParseParameter() throws Exception {
|
||||||
final String jsonConfiguration =
|
final String jsonConfiguration =
|
||||||
IOUtils.toString(
|
IOUtils.toString(
|
||||||
this.getClass()
|
this.getClass().getResourceAsStream("/eu/dnetlib/application/parameters.json"));
|
||||||
.getResourceAsStream("/eu/dnetlib/application/parameters.json"));
|
|
||||||
assertNotNull(jsonConfiguration);
|
assertNotNull(jsonConfiguration);
|
||||||
ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
|
||||||
parser.parseArgument(
|
parser.parseArgument(
|
||||||
|
|
|
@ -21,8 +21,7 @@ public class HdfsSupportTest {
|
||||||
@Test
|
@Test
|
||||||
public void shouldThrowARuntimeExceptionOnError() {
|
public void shouldThrowARuntimeExceptionOnError() {
|
||||||
// when
|
// when
|
||||||
assertThrows(
|
assertThrows(RuntimeException.class, () -> HdfsSupport.remove(null, new Configuration()));
|
||||||
RuntimeException.class, () -> HdfsSupport.remove(null, new Configuration()));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -53,8 +52,7 @@ public class HdfsSupportTest {
|
||||||
@Test
|
@Test
|
||||||
public void shouldThrowARuntimeExceptionOnError() {
|
public void shouldThrowARuntimeExceptionOnError() {
|
||||||
// when
|
// when
|
||||||
assertThrows(
|
assertThrows(RuntimeException.class, () -> HdfsSupport.listFiles(null, new Configuration()));
|
||||||
RuntimeException.class, () -> HdfsSupport.listFiles(null, new Configuration()));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
|
|
@ -27,9 +27,7 @@ public class MessageTest {
|
||||||
assertEquals(m1.getJobName(), m.getJobName());
|
assertEquals(m1.getJobName(), m.getJobName());
|
||||||
|
|
||||||
assertNotNull(m1.getBody());
|
assertNotNull(m1.getBody());
|
||||||
m1.getBody()
|
m1.getBody().keySet().forEach(it -> assertEquals(m1.getBody().get(it), m.getBody().get(it)));
|
||||||
.keySet()
|
|
||||||
.forEach(it -> assertEquals(m1.getBody().get(it), m.getBody().get(it)));
|
|
||||||
assertEquals(m1.getJobName(), m.getJobName());
|
assertEquals(m1.getJobName(), m.getJobName());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -23,8 +23,7 @@ public class ModelSupport {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Defines the mapping between the actual entity types and the relative classes implementing
|
* Defines the mapping between the actual entity types and the relative classes implementing them
|
||||||
* them
|
|
||||||
*/
|
*/
|
||||||
public static final Map<EntityType, Class> entityTypes = Maps.newHashMap();
|
public static final Map<EntityType, Class> entityTypes = Maps.newHashMap();
|
||||||
|
|
||||||
|
@ -169,40 +168,26 @@ public class ModelSupport {
|
||||||
Optional.ofNullable(r.getRelType())
|
Optional.ofNullable(r.getRelType())
|
||||||
.map(
|
.map(
|
||||||
relType ->
|
relType ->
|
||||||
Optional.ofNullable(
|
Optional.ofNullable(r.getSubRelType())
|
||||||
r
|
|
||||||
.getSubRelType())
|
|
||||||
.map(
|
.map(
|
||||||
subRelType ->
|
subRelType ->
|
||||||
Optional
|
Optional.ofNullable(r.getRelClass())
|
||||||
.ofNullable(
|
|
||||||
r
|
|
||||||
.getRelClass())
|
|
||||||
.map(
|
.map(
|
||||||
relClass ->
|
relClass ->
|
||||||
String
|
String.join(
|
||||||
.join(
|
|
||||||
source,
|
source,
|
||||||
target,
|
target,
|
||||||
relType,
|
relType,
|
||||||
subRelType,
|
subRelType,
|
||||||
relClass))
|
relClass))
|
||||||
.orElse(
|
.orElse(
|
||||||
String
|
String.join(
|
||||||
.join(
|
|
||||||
source,
|
source,
|
||||||
target,
|
target,
|
||||||
relType,
|
relType,
|
||||||
subRelType)))
|
subRelType)))
|
||||||
.orElse(
|
.orElse(String.join(source, target, relType)))
|
||||||
String
|
.orElse(String.join(source, target)))
|
||||||
.join(
|
|
||||||
source,
|
|
||||||
target,
|
|
||||||
relType)))
|
|
||||||
.orElse(
|
|
||||||
String.join(
|
|
||||||
source, target)))
|
|
||||||
.orElse(source))
|
.orElse(source))
|
||||||
.orElse(null);
|
.orElse(null);
|
||||||
}
|
}
|
||||||
|
|
|
@ -76,11 +76,6 @@ public class DataInfo implements Serializable {
|
||||||
@Override
|
@Override
|
||||||
public int hashCode() {
|
public int hashCode() {
|
||||||
return Objects.hash(
|
return Objects.hash(
|
||||||
invisible,
|
invisible, inferred, deletedbyinference, trust, inferenceprovenance, provenanceaction);
|
||||||
inferred,
|
|
||||||
deletedbyinference,
|
|
||||||
trust,
|
|
||||||
inferenceprovenance,
|
|
||||||
provenanceaction);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -91,9 +91,7 @@ public class Dataset extends Result implements Serializable {
|
||||||
final Dataset d = (Dataset) e;
|
final Dataset d = (Dataset) e;
|
||||||
|
|
||||||
storagedate =
|
storagedate =
|
||||||
d.getStoragedate() != null && compareTrust(this, e) < 0
|
d.getStoragedate() != null && compareTrust(this, e) < 0 ? d.getStoragedate() : storagedate;
|
||||||
? d.getStoragedate()
|
|
||||||
: storagedate;
|
|
||||||
|
|
||||||
device = d.getDevice() != null && compareTrust(this, e) < 0 ? d.getDevice() : device;
|
device = d.getDevice() != null && compareTrust(this, e) < 0 ? d.getDevice() : device;
|
||||||
|
|
||||||
|
|
|
@ -385,15 +385,10 @@ public class Datasource extends OafEntity implements Serializable {
|
||||||
? d.getOfficialname()
|
? d.getOfficialname()
|
||||||
: officialname;
|
: officialname;
|
||||||
englishname =
|
englishname =
|
||||||
d.getEnglishname() != null && compareTrust(this, e) < 0
|
d.getEnglishname() != null && compareTrust(this, e) < 0 ? d.getEnglishname() : officialname;
|
||||||
? d.getEnglishname()
|
|
||||||
: officialname;
|
|
||||||
websiteurl =
|
websiteurl =
|
||||||
d.getWebsiteurl() != null && compareTrust(this, e) < 0
|
d.getWebsiteurl() != null && compareTrust(this, e) < 0 ? d.getWebsiteurl() : websiteurl;
|
||||||
? d.getWebsiteurl()
|
logourl = d.getLogourl() != null && compareTrust(this, e) < 0 ? d.getLogourl() : getLogourl();
|
||||||
: websiteurl;
|
|
||||||
logourl =
|
|
||||||
d.getLogourl() != null && compareTrust(this, e) < 0 ? d.getLogourl() : getLogourl();
|
|
||||||
contactemail =
|
contactemail =
|
||||||
d.getContactemail() != null && compareTrust(this, e) < 0
|
d.getContactemail() != null && compareTrust(this, e) < 0
|
||||||
? d.getContactemail()
|
? d.getContactemail()
|
||||||
|
@ -402,20 +397,15 @@ public class Datasource extends OafEntity implements Serializable {
|
||||||
d.getNamespaceprefix() != null && compareTrust(this, e) < 0
|
d.getNamespaceprefix() != null && compareTrust(this, e) < 0
|
||||||
? d.getNamespaceprefix()
|
? d.getNamespaceprefix()
|
||||||
: namespaceprefix;
|
: namespaceprefix;
|
||||||
latitude =
|
latitude = d.getLatitude() != null && compareTrust(this, e) < 0 ? d.getLatitude() : latitude;
|
||||||
d.getLatitude() != null && compareTrust(this, e) < 0 ? d.getLatitude() : latitude;
|
|
||||||
longitude =
|
longitude =
|
||||||
d.getLongitude() != null && compareTrust(this, e) < 0
|
d.getLongitude() != null && compareTrust(this, e) < 0 ? d.getLongitude() : longitude;
|
||||||
? d.getLongitude()
|
|
||||||
: longitude;
|
|
||||||
dateofvalidation =
|
dateofvalidation =
|
||||||
d.getDateofvalidation() != null && compareTrust(this, e) < 0
|
d.getDateofvalidation() != null && compareTrust(this, e) < 0
|
||||||
? d.getDateofvalidation()
|
? d.getDateofvalidation()
|
||||||
: dateofvalidation;
|
: dateofvalidation;
|
||||||
description =
|
description =
|
||||||
d.getDescription() != null && compareTrust(this, e) < 0
|
d.getDescription() != null && compareTrust(this, e) < 0 ? d.getDescription() : description;
|
||||||
? d.getDescription()
|
|
||||||
: description;
|
|
||||||
subjects = mergeLists(subjects, d.getSubjects());
|
subjects = mergeLists(subjects, d.getSubjects());
|
||||||
|
|
||||||
// opendoar specific fields (od*)
|
// opendoar specific fields (od*)
|
||||||
|
@ -428,9 +418,7 @@ public class Datasource extends OafEntity implements Serializable {
|
||||||
? d.getOdnumberofitemsdate()
|
? d.getOdnumberofitemsdate()
|
||||||
: odnumberofitemsdate;
|
: odnumberofitemsdate;
|
||||||
odpolicies =
|
odpolicies =
|
||||||
d.getOdpolicies() != null && compareTrust(this, e) < 0
|
d.getOdpolicies() != null && compareTrust(this, e) < 0 ? d.getOdpolicies() : odpolicies;
|
||||||
? d.getOdpolicies()
|
|
||||||
: odpolicies;
|
|
||||||
odlanguages = mergeLists(odlanguages, d.getOdlanguages());
|
odlanguages = mergeLists(odlanguages, d.getOdlanguages());
|
||||||
odcontenttypes = mergeLists(odcontenttypes, d.getOdcontenttypes());
|
odcontenttypes = mergeLists(odcontenttypes, d.getOdcontenttypes());
|
||||||
accessinfopackage = mergeLists(accessinfopackage, d.getAccessinfopackage());
|
accessinfopackage = mergeLists(accessinfopackage, d.getAccessinfopackage());
|
||||||
|
@ -482,9 +470,7 @@ public class Datasource extends OafEntity implements Serializable {
|
||||||
: datauploadrestriction;
|
: datauploadrestriction;
|
||||||
|
|
||||||
versioning =
|
versioning =
|
||||||
d.getVersioning() != null && compareTrust(this, e) < 0
|
d.getVersioning() != null && compareTrust(this, e) < 0 ? d.getVersioning() : versioning;
|
||||||
? d.getVersioning()
|
|
||||||
: versioning;
|
|
||||||
citationguidelineurl =
|
citationguidelineurl =
|
||||||
d.getCitationguidelineurl() != null && compareTrust(this, e) < 0
|
d.getCitationguidelineurl() != null && compareTrust(this, e) < 0
|
||||||
? d.getCitationguidelineurl()
|
? d.getCitationguidelineurl()
|
||||||
|
@ -496,9 +482,7 @@ public class Datasource extends OafEntity implements Serializable {
|
||||||
? d.getQualitymanagementkind()
|
? d.getQualitymanagementkind()
|
||||||
: qualitymanagementkind;
|
: qualitymanagementkind;
|
||||||
pidsystems =
|
pidsystems =
|
||||||
d.getPidsystems() != null && compareTrust(this, e) < 0
|
d.getPidsystems() != null && compareTrust(this, e) < 0 ? d.getPidsystems() : pidsystems;
|
||||||
? d.getPidsystems()
|
|
||||||
: pidsystems;
|
|
||||||
|
|
||||||
certificates =
|
certificates =
|
||||||
d.getCertificates() != null && compareTrust(this, e) < 0
|
d.getCertificates() != null && compareTrust(this, e) < 0
|
||||||
|
|
|
@ -123,15 +123,9 @@ public class Instance implements Serializable {
|
||||||
public String toComparableString() {
|
public String toComparableString() {
|
||||||
return String.format(
|
return String.format(
|
||||||
"%s::%s::%s::%s",
|
"%s::%s::%s::%s",
|
||||||
hostedby != null && hostedby.getKey() != null
|
hostedby != null && hostedby.getKey() != null ? hostedby.getKey().toLowerCase() : "",
|
||||||
? hostedby.getKey().toLowerCase()
|
accessright != null && accessright.getClassid() != null ? accessright.getClassid() : "",
|
||||||
: "",
|
instancetype != null && instancetype.getClassid() != null ? instancetype.getClassid() : "",
|
||||||
accessright != null && accessright.getClassid() != null
|
|
||||||
? accessright.getClassid()
|
|
||||||
: "",
|
|
||||||
instancetype != null && instancetype.getClassid() != null
|
|
||||||
? instancetype.getClassid()
|
|
||||||
: "",
|
|
||||||
url != null ? url : "");
|
url != null ? url : "");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -41,8 +41,7 @@ public class KeyValue implements Serializable {
|
||||||
? ""
|
? ""
|
||||||
: String.format(
|
: String.format(
|
||||||
"%s::%s",
|
"%s::%s",
|
||||||
key != null ? key.toLowerCase() : "",
|
key != null ? key.toLowerCase() : "", value != null ? value.toLowerCase() : "");
|
||||||
value != null ? value.toLowerCase() : "");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@JsonIgnore
|
@JsonIgnore
|
||||||
|
|
|
@ -41,8 +41,7 @@ public abstract class Oaf implements Serializable {
|
||||||
}
|
}
|
||||||
|
|
||||||
protected String extractTrust(Oaf e) {
|
protected String extractTrust(Oaf e) {
|
||||||
if (e == null || e.getDataInfo() == null || e.getDataInfo().getTrust() == null)
|
if (e == null || e.getDataInfo() == null || e.getDataInfo().getTrust() == null) return "0.0";
|
||||||
return "0.0";
|
|
||||||
return e.getDataInfo().getTrust();
|
return e.getDataInfo().getTrust();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -180,27 +180,19 @@ public class Organization extends OafEntity implements Serializable {
|
||||||
? o.getLegalshortname()
|
? o.getLegalshortname()
|
||||||
: legalshortname;
|
: legalshortname;
|
||||||
legalname =
|
legalname =
|
||||||
o.getLegalname() != null && compareTrust(this, e) < 0
|
o.getLegalname() != null && compareTrust(this, e) < 0 ? o.getLegalname() : legalname;
|
||||||
? o.getLegalname()
|
|
||||||
: legalname;
|
|
||||||
alternativeNames = mergeLists(o.getAlternativeNames(), alternativeNames);
|
alternativeNames = mergeLists(o.getAlternativeNames(), alternativeNames);
|
||||||
websiteurl =
|
websiteurl =
|
||||||
o.getWebsiteurl() != null && compareTrust(this, e) < 0
|
o.getWebsiteurl() != null && compareTrust(this, e) < 0 ? o.getWebsiteurl() : websiteurl;
|
||||||
? o.getWebsiteurl()
|
|
||||||
: websiteurl;
|
|
||||||
logourl = o.getLogourl() != null && compareTrust(this, e) < 0 ? o.getLogourl() : logourl;
|
logourl = o.getLogourl() != null && compareTrust(this, e) < 0 ? o.getLogourl() : logourl;
|
||||||
eclegalbody =
|
eclegalbody =
|
||||||
o.getEclegalbody() != null && compareTrust(this, e) < 0
|
o.getEclegalbody() != null && compareTrust(this, e) < 0 ? o.getEclegalbody() : eclegalbody;
|
||||||
? o.getEclegalbody()
|
|
||||||
: eclegalbody;
|
|
||||||
eclegalperson =
|
eclegalperson =
|
||||||
o.getEclegalperson() != null && compareTrust(this, e) < 0
|
o.getEclegalperson() != null && compareTrust(this, e) < 0
|
||||||
? o.getEclegalperson()
|
? o.getEclegalperson()
|
||||||
: eclegalperson;
|
: eclegalperson;
|
||||||
ecnonprofit =
|
ecnonprofit =
|
||||||
o.getEcnonprofit() != null && compareTrust(this, e) < 0
|
o.getEcnonprofit() != null && compareTrust(this, e) < 0 ? o.getEcnonprofit() : ecnonprofit;
|
||||||
? o.getEcnonprofit()
|
|
||||||
: ecnonprofit;
|
|
||||||
ecresearchorganization =
|
ecresearchorganization =
|
||||||
o.getEcresearchorganization() != null && compareTrust(this, e) < 0
|
o.getEcresearchorganization() != null && compareTrust(this, e) < 0
|
||||||
? o.getEcresearchorganization()
|
? o.getEcresearchorganization()
|
||||||
|
@ -226,9 +218,7 @@ public class Organization extends OafEntity implements Serializable {
|
||||||
? o.getEcsmevalidated()
|
? o.getEcsmevalidated()
|
||||||
: ecsmevalidated;
|
: ecsmevalidated;
|
||||||
ecnutscode =
|
ecnutscode =
|
||||||
o.getEcnutscode() != null && compareTrust(this, e) < 0
|
o.getEcnutscode() != null && compareTrust(this, e) < 0 ? o.getEcnutscode() : ecnutscode;
|
||||||
? o.getEcnutscode()
|
|
||||||
: ecnutscode;
|
|
||||||
country = o.getCountry() != null && compareTrust(this, e) < 0 ? o.getCountry() : country;
|
country = o.getCountry() != null && compareTrust(this, e) < 0 ? o.getCountry() : country;
|
||||||
mergeOAFDataInfo(o);
|
mergeOAFDataInfo(o);
|
||||||
}
|
}
|
||||||
|
|
|
@ -80,7 +80,6 @@ public class OriginDescription implements Serializable {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int hashCode() {
|
public int hashCode() {
|
||||||
return Objects.hash(
|
return Objects.hash(harvestDate, altered, baseURL, identifier, datestamp, metadataNamespace);
|
||||||
harvestDate, altered, baseURL, identifier, datestamp, metadataNamespace);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -276,25 +276,19 @@ public class Project extends OafEntity implements Serializable {
|
||||||
Project p = (Project) e;
|
Project p = (Project) e;
|
||||||
|
|
||||||
websiteurl =
|
websiteurl =
|
||||||
p.getWebsiteurl() != null && compareTrust(this, e) < 0
|
p.getWebsiteurl() != null && compareTrust(this, e) < 0 ? p.getWebsiteurl() : websiteurl;
|
||||||
? p.getWebsiteurl()
|
|
||||||
: websiteurl;
|
|
||||||
code = p.getCode() != null && compareTrust(this, e) < 0 ? p.getCode() : code;
|
code = p.getCode() != null && compareTrust(this, e) < 0 ? p.getCode() : code;
|
||||||
acronym = p.getAcronym() != null && compareTrust(this, e) < 0 ? p.getAcronym() : acronym;
|
acronym = p.getAcronym() != null && compareTrust(this, e) < 0 ? p.getAcronym() : acronym;
|
||||||
title = p.getTitle() != null && compareTrust(this, e) < 0 ? p.getTitle() : title;
|
title = p.getTitle() != null && compareTrust(this, e) < 0 ? p.getTitle() : title;
|
||||||
startdate =
|
startdate =
|
||||||
p.getStartdate() != null && compareTrust(this, e) < 0
|
p.getStartdate() != null && compareTrust(this, e) < 0 ? p.getStartdate() : startdate;
|
||||||
? p.getStartdate()
|
|
||||||
: startdate;
|
|
||||||
enddate = p.getEnddate() != null && compareTrust(this, e) < 0 ? p.getEnddate() : enddate;
|
enddate = p.getEnddate() != null && compareTrust(this, e) < 0 ? p.getEnddate() : enddate;
|
||||||
callidentifier =
|
callidentifier =
|
||||||
p.getCallidentifier() != null && compareTrust(this, e) < 0
|
p.getCallidentifier() != null && compareTrust(this, e) < 0
|
||||||
? p.getCallidentifier()
|
? p.getCallidentifier()
|
||||||
: callidentifier;
|
: callidentifier;
|
||||||
keywords =
|
keywords = p.getKeywords() != null && compareTrust(this, e) < 0 ? p.getKeywords() : keywords;
|
||||||
p.getKeywords() != null && compareTrust(this, e) < 0 ? p.getKeywords() : keywords;
|
duration = p.getDuration() != null && compareTrust(this, e) < 0 ? p.getDuration() : duration;
|
||||||
duration =
|
|
||||||
p.getDuration() != null && compareTrust(this, e) < 0 ? p.getDuration() : duration;
|
|
||||||
ecsc39 = p.getEcsc39() != null && compareTrust(this, e) < 0 ? p.getEcsc39() : ecsc39;
|
ecsc39 = p.getEcsc39() != null && compareTrust(this, e) < 0 ? p.getEcsc39() : ecsc39;
|
||||||
oamandatepublications =
|
oamandatepublications =
|
||||||
p.getOamandatepublications() != null && compareTrust(this, e) < 0
|
p.getOamandatepublications() != null && compareTrust(this, e) < 0
|
||||||
|
@ -311,13 +305,9 @@ public class Project extends OafEntity implements Serializable {
|
||||||
? p.getContracttype()
|
? p.getContracttype()
|
||||||
: contracttype;
|
: contracttype;
|
||||||
optional1 =
|
optional1 =
|
||||||
p.getOptional1() != null && compareTrust(this, e) < 0
|
p.getOptional1() != null && compareTrust(this, e) < 0 ? p.getOptional1() : optional1;
|
||||||
? p.getOptional1()
|
|
||||||
: optional1;
|
|
||||||
optional2 =
|
optional2 =
|
||||||
p.getOptional2() != null && compareTrust(this, e) < 0
|
p.getOptional2() != null && compareTrust(this, e) < 0 ? p.getOptional2() : optional2;
|
||||||
? p.getOptional2()
|
|
||||||
: optional2;
|
|
||||||
jsonextrainfo =
|
jsonextrainfo =
|
||||||
p.getJsonextrainfo() != null && compareTrust(this, e) < 0
|
p.getJsonextrainfo() != null && compareTrust(this, e) < 0
|
||||||
? p.getJsonextrainfo()
|
? p.getJsonextrainfo()
|
||||||
|
@ -327,9 +317,7 @@ public class Project extends OafEntity implements Serializable {
|
||||||
? p.getContactfullname()
|
? p.getContactfullname()
|
||||||
: contactfullname;
|
: contactfullname;
|
||||||
contactfax =
|
contactfax =
|
||||||
p.getContactfax() != null && compareTrust(this, e) < 0
|
p.getContactfax() != null && compareTrust(this, e) < 0 ? p.getContactfax() : contactfax;
|
||||||
? p.getContactfax()
|
|
||||||
: contactfax;
|
|
||||||
contactphone =
|
contactphone =
|
||||||
p.getContactphone() != null && compareTrust(this, e) < 0
|
p.getContactphone() != null && compareTrust(this, e) < 0
|
||||||
? p.getContactphone()
|
? p.getContactphone()
|
||||||
|
@ -339,12 +327,9 @@ public class Project extends OafEntity implements Serializable {
|
||||||
? p.getContactemail()
|
? p.getContactemail()
|
||||||
: contactemail;
|
: contactemail;
|
||||||
summary = p.getSummary() != null && compareTrust(this, e) < 0 ? p.getSummary() : summary;
|
summary = p.getSummary() != null && compareTrust(this, e) < 0 ? p.getSummary() : summary;
|
||||||
currency =
|
currency = p.getCurrency() != null && compareTrust(this, e) < 0 ? p.getCurrency() : currency;
|
||||||
p.getCurrency() != null && compareTrust(this, e) < 0 ? p.getCurrency() : currency;
|
|
||||||
totalcost =
|
totalcost =
|
||||||
p.getTotalcost() != null && compareTrust(this, e) < 0
|
p.getTotalcost() != null && compareTrust(this, e) < 0 ? p.getTotalcost() : totalcost;
|
||||||
? p.getTotalcost()
|
|
||||||
: totalcost;
|
|
||||||
fundedamount =
|
fundedamount =
|
||||||
p.getFundedamount() != null && compareTrust(this, e) < 0
|
p.getFundedamount() != null && compareTrust(this, e) < 0
|
||||||
? p.getFundedamount()
|
? p.getFundedamount()
|
||||||
|
|
|
@ -56,12 +56,8 @@ public class DLIDataset extends Dataset {
|
||||||
if (a != null)
|
if (a != null)
|
||||||
a.forEach(
|
a.forEach(
|
||||||
p -> {
|
p -> {
|
||||||
if (p != null
|
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||||
&& StringUtils.isNotBlank(p.getId())
|
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||||
&& result.containsKey(p.getId())) {
|
|
||||||
if ("incomplete"
|
|
||||||
.equalsIgnoreCase(
|
|
||||||
result.get(p.getId()).getCompletionStatus())
|
|
||||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||||
result.put(p.getId(), p);
|
result.put(p.getId(), p);
|
||||||
}
|
}
|
||||||
|
@ -72,12 +68,8 @@ public class DLIDataset extends Dataset {
|
||||||
if (b != null)
|
if (b != null)
|
||||||
b.forEach(
|
b.forEach(
|
||||||
p -> {
|
p -> {
|
||||||
if (p != null
|
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||||
&& StringUtils.isNotBlank(p.getId())
|
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||||
&& result.containsKey(p.getId())) {
|
|
||||||
if ("incomplete"
|
|
||||||
.equalsIgnoreCase(
|
|
||||||
result.get(p.getId()).getCompletionStatus())
|
|
||||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||||
result.put(p.getId(), p);
|
result.put(p.getId(), p);
|
||||||
}
|
}
|
||||||
|
|
|
@ -54,12 +54,8 @@ public class DLIPublication extends Publication implements Serializable {
|
||||||
if (a != null)
|
if (a != null)
|
||||||
a.forEach(
|
a.forEach(
|
||||||
p -> {
|
p -> {
|
||||||
if (p != null
|
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||||
&& StringUtils.isNotBlank(p.getId())
|
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||||
&& result.containsKey(p.getId())) {
|
|
||||||
if ("incomplete"
|
|
||||||
.equalsIgnoreCase(
|
|
||||||
result.get(p.getId()).getCompletionStatus())
|
|
||||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||||
result.put(p.getId(), p);
|
result.put(p.getId(), p);
|
||||||
}
|
}
|
||||||
|
@ -70,12 +66,8 @@ public class DLIPublication extends Publication implements Serializable {
|
||||||
if (b != null)
|
if (b != null)
|
||||||
b.forEach(
|
b.forEach(
|
||||||
p -> {
|
p -> {
|
||||||
if (p != null
|
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||||
&& StringUtils.isNotBlank(p.getId())
|
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||||
&& result.containsKey(p.getId())) {
|
|
||||||
if ("incomplete"
|
|
||||||
.equalsIgnoreCase(
|
|
||||||
result.get(p.getId()).getCompletionStatus())
|
|
||||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||||
result.put(p.getId(), p);
|
result.put(p.getId(), p);
|
||||||
}
|
}
|
||||||
|
|
|
@ -82,12 +82,8 @@ public class DLIUnknown extends Oaf implements Serializable {
|
||||||
if (a != null)
|
if (a != null)
|
||||||
a.forEach(
|
a.forEach(
|
||||||
p -> {
|
p -> {
|
||||||
if (p != null
|
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||||
&& StringUtils.isNotBlank(p.getId())
|
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||||
&& result.containsKey(p.getId())) {
|
|
||||||
if ("incomplete"
|
|
||||||
.equalsIgnoreCase(
|
|
||||||
result.get(p.getId()).getCompletionStatus())
|
|
||||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||||
result.put(p.getId(), p);
|
result.put(p.getId(), p);
|
||||||
}
|
}
|
||||||
|
@ -98,12 +94,8 @@ public class DLIUnknown extends Oaf implements Serializable {
|
||||||
if (b != null)
|
if (b != null)
|
||||||
b.forEach(
|
b.forEach(
|
||||||
p -> {
|
p -> {
|
||||||
if (p != null
|
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||||
&& StringUtils.isNotBlank(p.getId())
|
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||||
&& result.containsKey(p.getId())) {
|
|
||||||
if ("incomplete"
|
|
||||||
.equalsIgnoreCase(
|
|
||||||
result.get(p.getId()).getCompletionStatus())
|
|
||||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||||
result.put(p.getId(), p);
|
result.put(p.getId(), p);
|
||||||
}
|
}
|
||||||
|
|
|
@ -65,8 +65,7 @@ public class MigrateActionSet {
|
||||||
|
|
||||||
ISLookUpService isLookUp = ISLookupClientFactory.getLookUpService(isLookupUrl);
|
ISLookUpService isLookUp = ISLookupClientFactory.getLookUpService(isLookupUrl);
|
||||||
|
|
||||||
Configuration conf =
|
Configuration conf = getConfiguration(distcp_task_timeout, distcp_memory_mb, distcp_num_maps);
|
||||||
getConfiguration(distcp_task_timeout, distcp_memory_mb, distcp_num_maps);
|
|
||||||
FileSystem targetFS = FileSystem.get(conf);
|
FileSystem targetFS = FileSystem.get(conf);
|
||||||
|
|
||||||
Configuration sourceConf =
|
Configuration sourceConf =
|
||||||
|
@ -99,13 +98,7 @@ public class MigrateActionSet {
|
||||||
final String actionSetDirectory = pathQ.pollLast();
|
final String actionSetDirectory = pathQ.pollLast();
|
||||||
|
|
||||||
final Path targetPath =
|
final Path targetPath =
|
||||||
new Path(
|
new Path(targetNN + workDir + SEPARATOR + actionSetDirectory + SEPARATOR + rawSet);
|
||||||
targetNN
|
|
||||||
+ workDir
|
|
||||||
+ SEPARATOR
|
|
||||||
+ actionSetDirectory
|
|
||||||
+ SEPARATOR
|
|
||||||
+ rawSet);
|
|
||||||
|
|
||||||
log.info("using TARGET PATH: {}", targetPath);
|
log.info("using TARGET PATH: {}", targetPath);
|
||||||
|
|
||||||
|
@ -114,12 +107,7 @@ public class MigrateActionSet {
|
||||||
targetFS.delete(targetPath, true);
|
targetFS.delete(targetPath, true);
|
||||||
}
|
}
|
||||||
runDistcp(
|
runDistcp(
|
||||||
distcp_num_maps,
|
distcp_num_maps, distcp_memory_mb, distcp_task_timeout, conf, source, targetPath);
|
||||||
distcp_memory_mb,
|
|
||||||
distcp_task_timeout,
|
|
||||||
conf,
|
|
||||||
source,
|
|
||||||
targetPath);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
targetPaths.add(targetPath);
|
targetPaths.add(targetPath);
|
||||||
|
@ -128,8 +116,7 @@ public class MigrateActionSet {
|
||||||
}
|
}
|
||||||
|
|
||||||
props.setProperty(
|
props.setProperty(
|
||||||
TARGET_PATHS,
|
TARGET_PATHS, targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(",")));
|
||||||
targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(",")));
|
|
||||||
File file = new File(System.getProperty("oozie.action.output.properties"));
|
File file = new File(System.getProperty("oozie.action.output.properties"));
|
||||||
|
|
||||||
try (OutputStream os = new FileOutputStream(file)) {
|
try (OutputStream os = new FileOutputStream(file)) {
|
||||||
|
|
|
@ -48,9 +48,7 @@ public class ProtoConverter implements Serializable {
|
||||||
rel.setRelClass(r.getRelClass());
|
rel.setRelClass(r.getRelClass());
|
||||||
rel.setCollectedfrom(
|
rel.setCollectedfrom(
|
||||||
r.getCollectedfromCount() > 0
|
r.getCollectedfromCount() > 0
|
||||||
? r.getCollectedfromList().stream()
|
? r.getCollectedfromList().stream().map(kv -> mapKV(kv)).collect(Collectors.toList())
|
||||||
.map(kv -> mapKV(kv))
|
|
||||||
.collect(Collectors.toList())
|
|
||||||
: null);
|
: null);
|
||||||
return rel;
|
return rel;
|
||||||
}
|
}
|
||||||
|
@ -77,9 +75,7 @@ public class ProtoConverter implements Serializable {
|
||||||
|
|
||||||
final ResultProtos.Result r = oaf.getEntity().getResult();
|
final ResultProtos.Result r = oaf.getEntity().getResult();
|
||||||
if (r.getInstanceCount() > 0) {
|
if (r.getInstanceCount() > 0) {
|
||||||
return r.getInstanceList().stream()
|
return r.getInstanceList().stream().map(i -> convertInstance(i)).collect(Collectors.toList());
|
||||||
.map(i -> convertInstance(i))
|
|
||||||
.collect(Collectors.toList());
|
|
||||||
}
|
}
|
||||||
return Lists.newArrayList();
|
return Lists.newArrayList();
|
||||||
}
|
}
|
||||||
|
@ -130,8 +126,7 @@ public class ProtoConverter implements Serializable {
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Datasource convertDataSource(OafProtos.Oaf oaf) {
|
private static Datasource convertDataSource(OafProtos.Oaf oaf) {
|
||||||
final DatasourceProtos.Datasource.Metadata m =
|
final DatasourceProtos.Datasource.Metadata m = oaf.getEntity().getDatasource().getMetadata();
|
||||||
oaf.getEntity().getDatasource().getMetadata();
|
|
||||||
final Datasource datasource = setOaf(new Datasource(), oaf);
|
final Datasource datasource = setOaf(new Datasource(), oaf);
|
||||||
setEntity(datasource, oaf);
|
setEntity(datasource, oaf);
|
||||||
datasource.setAccessinfopackage(
|
datasource.setAccessinfopackage(
|
||||||
|
@ -171,9 +166,7 @@ public class ProtoConverter implements Serializable {
|
||||||
datasource.setOpenairecompatibility(mapQualifier(m.getOpenairecompatibility()));
|
datasource.setOpenairecompatibility(mapQualifier(m.getOpenairecompatibility()));
|
||||||
datasource.setPidsystems(mapStringField(m.getPidsystems()));
|
datasource.setPidsystems(mapStringField(m.getPidsystems()));
|
||||||
datasource.setPolicies(
|
datasource.setPolicies(
|
||||||
m.getPoliciesList().stream()
|
m.getPoliciesList().stream().map(ProtoConverter::mapKV).collect(Collectors.toList()));
|
||||||
.map(ProtoConverter::mapKV)
|
|
||||||
.collect(Collectors.toList()));
|
|
||||||
datasource.setQualitymanagementkind(mapStringField(m.getQualitymanagementkind()));
|
datasource.setQualitymanagementkind(mapStringField(m.getQualitymanagementkind()));
|
||||||
datasource.setReleaseenddate(mapStringField(m.getReleaseenddate()));
|
datasource.setReleaseenddate(mapStringField(m.getReleaseenddate()));
|
||||||
datasource.setServiceprovider(mapBoolField(m.getServiceprovider()));
|
datasource.setServiceprovider(mapBoolField(m.getServiceprovider()));
|
||||||
|
@ -218,9 +211,7 @@ public class ProtoConverter implements Serializable {
|
||||||
project.setTitle(mapStringField(m.getTitle()));
|
project.setTitle(mapStringField(m.getTitle()));
|
||||||
project.setWebsiteurl(mapStringField(m.getWebsiteurl()));
|
project.setWebsiteurl(mapStringField(m.getWebsiteurl()));
|
||||||
project.setFundingtree(
|
project.setFundingtree(
|
||||||
m.getFundingtreeList().stream()
|
m.getFundingtreeList().stream().map(f -> mapStringField(f)).collect(Collectors.toList()));
|
||||||
.map(f -> mapStringField(f))
|
|
||||||
.collect(Collectors.toList()));
|
|
||||||
project.setJsonextrainfo(mapStringField(m.getJsonextrainfo()));
|
project.setJsonextrainfo(mapStringField(m.getJsonextrainfo()));
|
||||||
project.setSummary(mapStringField(m.getSummary()));
|
project.setSummary(mapStringField(m.getSummary()));
|
||||||
project.setOptional1(mapStringField(m.getOptional1()));
|
project.setOptional1(mapStringField(m.getOptional1()));
|
||||||
|
@ -278,9 +269,7 @@ public class ProtoConverter implements Serializable {
|
||||||
.map(ProtoConverter::mapStringField)
|
.map(ProtoConverter::mapStringField)
|
||||||
.collect(Collectors.toList()));
|
.collect(Collectors.toList()));
|
||||||
otherResearchProducts.setTool(
|
otherResearchProducts.setTool(
|
||||||
m.getToolList().stream()
|
m.getToolList().stream().map(ProtoConverter::mapStringField).collect(Collectors.toList()));
|
||||||
.map(ProtoConverter::mapStringField)
|
|
||||||
.collect(Collectors.toList()));
|
|
||||||
|
|
||||||
return otherResearchProducts;
|
return otherResearchProducts;
|
||||||
}
|
}
|
||||||
|
@ -326,9 +315,7 @@ public class ProtoConverter implements Serializable {
|
||||||
entity.setId(e.getId());
|
entity.setId(e.getId());
|
||||||
entity.setOriginalId(e.getOriginalIdList());
|
entity.setOriginalId(e.getOriginalIdList());
|
||||||
entity.setCollectedfrom(
|
entity.setCollectedfrom(
|
||||||
e.getCollectedfromList().stream()
|
e.getCollectedfromList().stream().map(ProtoConverter::mapKV).collect(Collectors.toList()));
|
||||||
.map(ProtoConverter::mapKV)
|
|
||||||
.collect(Collectors.toList()));
|
|
||||||
entity.setPid(
|
entity.setPid(
|
||||||
e.getPidList().stream()
|
e.getPidList().stream()
|
||||||
.map(ProtoConverter::mapStructuredProperty)
|
.map(ProtoConverter::mapStructuredProperty)
|
||||||
|
@ -346,9 +333,7 @@ public class ProtoConverter implements Serializable {
|
||||||
// setting Entity fields
|
// setting Entity fields
|
||||||
final ResultProtos.Result.Metadata m = oaf.getEntity().getResult().getMetadata();
|
final ResultProtos.Result.Metadata m = oaf.getEntity().getResult().getMetadata();
|
||||||
entity.setAuthor(
|
entity.setAuthor(
|
||||||
m.getAuthorList().stream()
|
m.getAuthorList().stream().map(ProtoConverter::mapAuthor).collect(Collectors.toList()));
|
||||||
.map(ProtoConverter::mapAuthor)
|
|
||||||
.collect(Collectors.toList()));
|
|
||||||
entity.setResulttype(mapQualifier(m.getResulttype()));
|
entity.setResulttype(mapQualifier(m.getResulttype()));
|
||||||
entity.setLanguage(mapQualifier(m.getLanguage()));
|
entity.setLanguage(mapQualifier(m.getLanguage()));
|
||||||
entity.setCountry(
|
entity.setCountry(
|
||||||
|
@ -396,12 +381,9 @@ public class ProtoConverter implements Serializable {
|
||||||
.map(ProtoConverter::mapStringField)
|
.map(ProtoConverter::mapStringField)
|
||||||
.collect(Collectors.toList()));
|
.collect(Collectors.toList()));
|
||||||
entity.setContext(
|
entity.setContext(
|
||||||
m.getContextList().stream()
|
m.getContextList().stream().map(ProtoConverter::mapContext).collect(Collectors.toList()));
|
||||||
.map(ProtoConverter::mapContext)
|
|
||||||
.collect(Collectors.toList()));
|
|
||||||
|
|
||||||
entity.setBestaccessright(
|
entity.setBestaccessright(getBestAccessRights(oaf.getEntity().getResult().getInstanceList()));
|
||||||
getBestAccessRights(oaf.getEntity().getResult().getInstanceList()));
|
|
||||||
|
|
||||||
return entity;
|
return entity;
|
||||||
}
|
}
|
||||||
|
|
|
@ -68,20 +68,17 @@ public class TransformActions implements Serializable {
|
||||||
SparkConf conf = new SparkConf();
|
SparkConf conf = new SparkConf();
|
||||||
|
|
||||||
runWithSparkSession(
|
runWithSparkSession(
|
||||||
conf,
|
conf, isSparkSessionManaged, spark -> transformActions(inputPaths, targetBaseDir, spark));
|
||||||
isSparkSessionManaged,
|
|
||||||
spark -> transformActions(inputPaths, targetBaseDir, spark));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void transformActions(
|
private static void transformActions(String inputPaths, String targetBaseDir, SparkSession spark)
|
||||||
String inputPaths, String targetBaseDir, SparkSession spark) throws IOException {
|
throws IOException {
|
||||||
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
||||||
final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration());
|
final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration());
|
||||||
|
|
||||||
for (String sourcePath : Lists.newArrayList(Splitter.on(",").split(inputPaths))) {
|
for (String sourcePath : Lists.newArrayList(Splitter.on(",").split(inputPaths))) {
|
||||||
|
|
||||||
LinkedList<String> pathQ =
|
LinkedList<String> pathQ = Lists.newLinkedList(Splitter.on(SEPARATOR).split(sourcePath));
|
||||||
Lists.newLinkedList(Splitter.on(SEPARATOR).split(sourcePath));
|
|
||||||
|
|
||||||
final String rawset = pathQ.pollLast();
|
final String rawset = pathQ.pollLast();
|
||||||
final String actionSetDirectory = pathQ.pollLast();
|
final String actionSetDirectory = pathQ.pollLast();
|
||||||
|
@ -98,17 +95,11 @@ public class TransformActions implements Serializable {
|
||||||
log.info("transforming actions from '{}' to '{}'", sourcePath, targetDirectory);
|
log.info("transforming actions from '{}' to '{}'", sourcePath, targetDirectory);
|
||||||
|
|
||||||
sc.sequenceFile(sourcePath, Text.class, Text.class)
|
sc.sequenceFile(sourcePath, Text.class, Text.class)
|
||||||
.map(
|
.map(a -> eu.dnetlib.actionmanager.actions.AtomicAction.fromJSON(a._2().toString()))
|
||||||
a ->
|
|
||||||
eu.dnetlib.actionmanager.actions.AtomicAction.fromJSON(
|
|
||||||
a._2().toString()))
|
|
||||||
.map(TransformActions::doTransform)
|
.map(TransformActions::doTransform)
|
||||||
.filter(Objects::nonNull)
|
.filter(Objects::nonNull)
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
a ->
|
a -> new Tuple2<>(a.getClazz().toString(), OBJECT_MAPPER.writeValueAsString(a)))
|
||||||
new Tuple2<>(
|
|
||||||
a.getClazz().toString(),
|
|
||||||
OBJECT_MAPPER.writeValueAsString(a)))
|
|
||||||
.mapToPair(t -> new Tuple2(new Text(t._1()), new Text(t._2())))
|
.mapToPair(t -> new Tuple2(new Text(t._1()), new Text(t._2())))
|
||||||
.saveAsNewAPIHadoopFile(
|
.saveAsNewAPIHadoopFile(
|
||||||
targetDirectory.toString(),
|
targetDirectory.toString(),
|
||||||
|
@ -139,20 +130,14 @@ public class TransformActions implements Serializable {
|
||||||
return new AtomicAction<>(Project.class, (Project) oaf);
|
return new AtomicAction<>(Project.class, (Project) oaf);
|
||||||
case result:
|
case result:
|
||||||
final String resulttypeid =
|
final String resulttypeid =
|
||||||
proto_oaf
|
proto_oaf.getEntity().getResult().getMetadata().getResulttype().getClassid();
|
||||||
.getEntity()
|
|
||||||
.getResult()
|
|
||||||
.getMetadata()
|
|
||||||
.getResulttype()
|
|
||||||
.getClassid();
|
|
||||||
switch (resulttypeid) {
|
switch (resulttypeid) {
|
||||||
case "publication":
|
case "publication":
|
||||||
return new AtomicAction<>(Publication.class, (Publication) oaf);
|
return new AtomicAction<>(Publication.class, (Publication) oaf);
|
||||||
case "software":
|
case "software":
|
||||||
return new AtomicAction<>(Software.class, (Software) oaf);
|
return new AtomicAction<>(Software.class, (Software) oaf);
|
||||||
case "other":
|
case "other":
|
||||||
return new AtomicAction<>(
|
return new AtomicAction<>(OtherResearchProduct.class, (OtherResearchProduct) oaf);
|
||||||
OtherResearchProduct.class, (OtherResearchProduct) oaf);
|
|
||||||
case "dataset":
|
case "dataset":
|
||||||
return new AtomicAction<>(Dataset.class, (Dataset) oaf);
|
return new AtomicAction<>(Dataset.class, (Dataset) oaf);
|
||||||
default:
|
default:
|
||||||
|
|
|
@ -29,16 +29,13 @@ public class PartitionActionSetsByPayloadTypeJob {
|
||||||
private static final StructType KV_SCHEMA =
|
private static final StructType KV_SCHEMA =
|
||||||
StructType$.MODULE$.apply(
|
StructType$.MODULE$.apply(
|
||||||
Arrays.asList(
|
Arrays.asList(
|
||||||
StructField$.MODULE$.apply(
|
StructField$.MODULE$.apply("key", DataTypes.StringType, false, Metadata.empty()),
|
||||||
"key", DataTypes.StringType, false, Metadata.empty()),
|
StructField$.MODULE$.apply("value", DataTypes.StringType, false, Metadata.empty())));
|
||||||
StructField$.MODULE$.apply(
|
|
||||||
"value", DataTypes.StringType, false, Metadata.empty())));
|
|
||||||
|
|
||||||
private static final StructType ATOMIC_ACTION_SCHEMA =
|
private static final StructType ATOMIC_ACTION_SCHEMA =
|
||||||
StructType$.MODULE$.apply(
|
StructType$.MODULE$.apply(
|
||||||
Arrays.asList(
|
Arrays.asList(
|
||||||
StructField$.MODULE$.apply(
|
StructField$.MODULE$.apply("clazz", DataTypes.StringType, false, Metadata.empty()),
|
||||||
"clazz", DataTypes.StringType, false, Metadata.empty()),
|
|
||||||
StructField$.MODULE$.apply(
|
StructField$.MODULE$.apply(
|
||||||
"payload", DataTypes.StringType, false, Metadata.empty())));
|
"payload", DataTypes.StringType, false, Metadata.empty())));
|
||||||
|
|
||||||
|
@ -101,14 +98,10 @@ public class PartitionActionSetsByPayloadTypeJob {
|
||||||
private static void readAndWriteActionSetsFromPaths(
|
private static void readAndWriteActionSetsFromPaths(
|
||||||
SparkSession spark, List<String> inputActionSetPaths, String outputPath) {
|
SparkSession spark, List<String> inputActionSetPaths, String outputPath) {
|
||||||
inputActionSetPaths.stream()
|
inputActionSetPaths.stream()
|
||||||
.filter(
|
.filter(path -> HdfsSupport.exists(path, spark.sparkContext().hadoopConfiguration()))
|
||||||
path ->
|
|
||||||
HdfsSupport.exists(
|
|
||||||
path, spark.sparkContext().hadoopConfiguration()))
|
|
||||||
.forEach(
|
.forEach(
|
||||||
inputActionSetPath -> {
|
inputActionSetPath -> {
|
||||||
Dataset<Row> actionDS =
|
Dataset<Row> actionDS = readActionSetFromPath(spark, inputActionSetPath);
|
||||||
readActionSetFromPath(spark, inputActionSetPath);
|
|
||||||
saveActions(actionDS, outputPath);
|
saveActions(actionDS, outputPath);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -122,7 +115,8 @@ public class PartitionActionSetsByPayloadTypeJob {
|
||||||
sc.sequenceFile(path, Text.class, Text.class)
|
sc.sequenceFile(path, Text.class, Text.class)
|
||||||
.map(x -> RowFactory.create(x._1().toString(), x._2().toString()));
|
.map(x -> RowFactory.create(x._1().toString(), x._2().toString()));
|
||||||
|
|
||||||
return spark.createDataFrame(rdd, KV_SCHEMA)
|
return spark
|
||||||
|
.createDataFrame(rdd, KV_SCHEMA)
|
||||||
.withColumn("atomic_action", from_json(col("value"), ATOMIC_ACTION_SCHEMA))
|
.withColumn("atomic_action", from_json(col("value"), ATOMIC_ACTION_SCHEMA))
|
||||||
.select(expr("atomic_action.*"));
|
.select(expr("atomic_action.*"));
|
||||||
}
|
}
|
||||||
|
|
|
@ -128,30 +128,28 @@ public class PromoteActionPayloadForGraphTableJob {
|
||||||
SparkSession spark, String path, Class<G> rowClazz) {
|
SparkSession spark, String path, Class<G> rowClazz) {
|
||||||
logger.info("Reading graph table from path: {}", path);
|
logger.info("Reading graph table from path: {}", path);
|
||||||
|
|
||||||
return spark.read()
|
return spark
|
||||||
|
.read()
|
||||||
.textFile(path)
|
.textFile(path)
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<String, G>) value -> OBJECT_MAPPER.readValue(value, rowClazz),
|
(MapFunction<String, G>) value -> OBJECT_MAPPER.readValue(value, rowClazz),
|
||||||
Encoders.bean(rowClazz));
|
Encoders.bean(rowClazz));
|
||||||
|
|
||||||
/*
|
/*
|
||||||
return spark
|
* return spark .read() .parquet(path) .as(Encoders.bean(rowClazz));
|
||||||
.read()
|
|
||||||
.parquet(path)
|
|
||||||
.as(Encoders.bean(rowClazz));
|
|
||||||
*/
|
*/
|
||||||
}
|
}
|
||||||
|
|
||||||
private static <A extends Oaf> Dataset<A> readActionPayload(
|
private static <A extends Oaf> Dataset<A> readActionPayload(
|
||||||
SparkSession spark, String path, Class<A> actionPayloadClazz) {
|
SparkSession spark, String path, Class<A> actionPayloadClazz) {
|
||||||
logger.info("Reading action payload from path: {}", path);
|
logger.info("Reading action payload from path: {}", path);
|
||||||
return spark.read()
|
return spark
|
||||||
|
.read()
|
||||||
.parquet(path)
|
.parquet(path)
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<Row, A>)
|
(MapFunction<Row, A>)
|
||||||
value ->
|
value ->
|
||||||
OBJECT_MAPPER.readValue(
|
OBJECT_MAPPER.readValue(value.<String>getAs("payload"), actionPayloadClazz),
|
||||||
value.<String>getAs("payload"), actionPayloadClazz),
|
|
||||||
Encoders.bean(actionPayloadClazz));
|
Encoders.bean(actionPayloadClazz));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -170,8 +168,7 @@ public class PromoteActionPayloadForGraphTableJob {
|
||||||
SerializableSupplier<Function<A, String>> actionPayloadIdFn = ModelSupport::idFn;
|
SerializableSupplier<Function<A, String>> actionPayloadIdFn = ModelSupport::idFn;
|
||||||
SerializableSupplier<BiFunction<G, A, G>> mergeRowWithActionPayloadAndGetFn =
|
SerializableSupplier<BiFunction<G, A, G>> mergeRowWithActionPayloadAndGetFn =
|
||||||
MergeAndGet.functionFor(strategy);
|
MergeAndGet.functionFor(strategy);
|
||||||
SerializableSupplier<BiFunction<G, G, G>> mergeRowsAndGetFn =
|
SerializableSupplier<BiFunction<G, G, G>> mergeRowsAndGetFn = MergeAndGet.functionFor(strategy);
|
||||||
MergeAndGet.functionFor(strategy);
|
|
||||||
SerializableSupplier<G> zeroFn = zeroFn(rowClazz);
|
SerializableSupplier<G> zeroFn = zeroFn(rowClazz);
|
||||||
SerializableSupplier<Function<G, Boolean>> isNotZeroFn =
|
SerializableSupplier<Function<G, Boolean>> isNotZeroFn =
|
||||||
PromoteActionPayloadForGraphTableJob::isNotZeroFnUsingIdOrSource;
|
PromoteActionPayloadForGraphTableJob::isNotZeroFnUsingIdOrSource;
|
||||||
|
|
|
@ -23,8 +23,8 @@ public class PromoteActionPayloadFunctions {
|
||||||
private PromoteActionPayloadFunctions() {}
|
private PromoteActionPayloadFunctions() {}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Joins dataset representing graph table with dataset representing action payload using
|
* Joins dataset representing graph table with dataset representing action payload using supplied
|
||||||
* supplied functions.
|
* functions.
|
||||||
*
|
*
|
||||||
* @param rowDS Dataset representing graph table
|
* @param rowDS Dataset representing graph table
|
||||||
* @param actionPayloadDS Dataset representing action payload
|
* @param actionPayloadDS Dataset representing action payload
|
||||||
|
@ -62,30 +62,21 @@ public class PromoteActionPayloadFunctions {
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<Tuple2<Tuple2<String, G>, Tuple2<String, A>>, G>)
|
(MapFunction<Tuple2<Tuple2<String, G>, Tuple2<String, A>>, G>)
|
||||||
value -> {
|
value -> {
|
||||||
Optional<G> rowOpt =
|
Optional<G> rowOpt = Optional.ofNullable(value._1()).map(Tuple2::_2);
|
||||||
Optional.ofNullable(value._1()).map(Tuple2::_2);
|
Optional<A> actionPayloadOpt = Optional.ofNullable(value._2()).map(Tuple2::_2);
|
||||||
Optional<A> actionPayloadOpt =
|
return rowOpt
|
||||||
Optional.ofNullable(value._2()).map(Tuple2::_2);
|
.map(
|
||||||
return rowOpt.map(
|
|
||||||
row ->
|
row ->
|
||||||
actionPayloadOpt
|
actionPayloadOpt
|
||||||
.map(
|
.map(
|
||||||
actionPayload ->
|
actionPayload ->
|
||||||
mergeAndGetFn
|
mergeAndGetFn.get().apply(row, actionPayload))
|
||||||
.get()
|
|
||||||
.apply(
|
|
||||||
row,
|
|
||||||
actionPayload))
|
|
||||||
.orElse(row))
|
.orElse(row))
|
||||||
.orElseGet(
|
.orElseGet(
|
||||||
() ->
|
() ->
|
||||||
actionPayloadOpt
|
actionPayloadOpt
|
||||||
.filter(
|
.filter(
|
||||||
actionPayload ->
|
actionPayload -> actionPayload.getClass().equals(rowClazz))
|
||||||
actionPayload
|
|
||||||
.getClass()
|
|
||||||
.equals(
|
|
||||||
rowClazz))
|
|
||||||
.map(rowClazz::cast)
|
.map(rowClazz::cast)
|
||||||
.orElse(null));
|
.orElse(null));
|
||||||
},
|
},
|
||||||
|
@ -96,8 +87,7 @@ public class PromoteActionPayloadFunctions {
|
||||||
private static <T extends Oaf> Dataset<Tuple2<String, T>> mapToTupleWithId(
|
private static <T extends Oaf> Dataset<Tuple2<String, T>> mapToTupleWithId(
|
||||||
Dataset<T> ds, SerializableSupplier<Function<T, String>> idFn, Class<T> clazz) {
|
Dataset<T> ds, SerializableSupplier<Function<T, String>> idFn, Class<T> clazz) {
|
||||||
return ds.map(
|
return ds.map(
|
||||||
(MapFunction<T, Tuple2<String, T>>)
|
(MapFunction<T, Tuple2<String, T>>) value -> new Tuple2<>(idFn.get().apply(value), value),
|
||||||
value -> new Tuple2<>(idFn.get().apply(value), value),
|
|
||||||
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
|
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -122,8 +112,8 @@ public class PromoteActionPayloadFunctions {
|
||||||
Class<G> rowClazz) {
|
Class<G> rowClazz) {
|
||||||
TypedColumn<G, G> aggregator =
|
TypedColumn<G, G> aggregator =
|
||||||
new TableAggregator<>(zeroFn, mergeAndGetFn, isNotZeroFn, rowClazz).toColumn();
|
new TableAggregator<>(zeroFn, mergeAndGetFn, isNotZeroFn, rowClazz).toColumn();
|
||||||
return rowDS.groupByKey(
|
return rowDS
|
||||||
(MapFunction<G, String>) x -> rowIdFn.get().apply(x), Encoders.STRING())
|
.groupByKey((MapFunction<G, String>) x -> rowIdFn.get().apply(x), Encoders.STRING())
|
||||||
.agg(aggregator)
|
.agg(aggregator)
|
||||||
.map((MapFunction<Tuple2<String, G>, G>) Tuple2::_2, Encoders.kryo(rowClazz));
|
.map((MapFunction<Tuple2<String, G>, G>) Tuple2::_2, Encoders.kryo(rowClazz));
|
||||||
}
|
}
|
||||||
|
@ -174,8 +164,7 @@ public class PromoteActionPayloadFunctions {
|
||||||
} else if (!isNotZero.apply(left) && isNotZero.apply(right)) {
|
} else if (!isNotZero.apply(left) && isNotZero.apply(right)) {
|
||||||
return right;
|
return right;
|
||||||
}
|
}
|
||||||
throw new RuntimeException(
|
throw new RuntimeException("internal aggregation error: left and right objects are zero");
|
||||||
"internal aggregation error: left and right objects are zero");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@ -48,8 +48,7 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
||||||
private static final StructType ATOMIC_ACTION_SCHEMA =
|
private static final StructType ATOMIC_ACTION_SCHEMA =
|
||||||
StructType$.MODULE$.apply(
|
StructType$.MODULE$.apply(
|
||||||
Arrays.asList(
|
Arrays.asList(
|
||||||
StructField$.MODULE$.apply(
|
StructField$.MODULE$.apply("clazz", DataTypes.StringType, false, Metadata.empty()),
|
||||||
"clazz", DataTypes.StringType, false, Metadata.empty()),
|
|
||||||
StructField$.MODULE$.apply(
|
StructField$.MODULE$.apply(
|
||||||
"payload", DataTypes.StringType, false, Metadata.empty())));
|
"payload", DataTypes.StringType, false, Metadata.empty())));
|
||||||
|
|
||||||
|
@ -75,8 +74,7 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
||||||
@Mock private ISClient isClient;
|
@Mock private ISClient isClient;
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void shouldPartitionActionSetsByPayloadType(@TempDir Path workingDir)
|
public void shouldPartitionActionSetsByPayloadType(@TempDir Path workingDir) throws Exception {
|
||||||
throws Exception {
|
|
||||||
// given
|
// given
|
||||||
Path inputActionSetsBaseDir = workingDir.resolve("input").resolve("action_sets");
|
Path inputActionSetsBaseDir = workingDir.resolve("input").resolve("action_sets");
|
||||||
Path outputDir = workingDir.resolve("output");
|
Path outputDir = workingDir.resolve("output");
|
||||||
|
@ -93,7 +91,9 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
||||||
job.setIsClient(isClient);
|
job.setIsClient(isClient);
|
||||||
job.run(
|
job.run(
|
||||||
Boolean.FALSE,
|
Boolean.FALSE,
|
||||||
"", // it can be empty we're mocking the response from isClient to resolve the
|
"", // it can be empty we're mocking the response from isClient
|
||||||
|
// to
|
||||||
|
// resolve the
|
||||||
// paths
|
// paths
|
||||||
outputDir.toString());
|
outputDir.toString());
|
||||||
|
|
||||||
|
@ -112,8 +112,7 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private List<String> resolveInputActionSetPaths(Path inputActionSetsBaseDir)
|
private List<String> resolveInputActionSetPaths(Path inputActionSetsBaseDir) throws IOException {
|
||||||
throws IOException {
|
|
||||||
Path inputActionSetJsonDumpsDir = getInputActionSetJsonDumpsDir();
|
Path inputActionSetJsonDumpsDir = getInputActionSetJsonDumpsDir();
|
||||||
return Files.list(inputActionSetJsonDumpsDir)
|
return Files.list(inputActionSetJsonDumpsDir)
|
||||||
.map(
|
.map(
|
||||||
|
@ -132,36 +131,27 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
||||||
Files.list(inputActionSetJsonDumpsDir)
|
Files.list(inputActionSetJsonDumpsDir)
|
||||||
.forEach(
|
.forEach(
|
||||||
inputActionSetJsonDumpFile -> {
|
inputActionSetJsonDumpFile -> {
|
||||||
String inputActionSetId =
|
String inputActionSetId = inputActionSetJsonDumpFile.getFileName().toString();
|
||||||
inputActionSetJsonDumpFile.getFileName().toString();
|
|
||||||
Path inputActionSetDir = inputActionSetsDir.resolve(inputActionSetId);
|
Path inputActionSetDir = inputActionSetsDir.resolve(inputActionSetId);
|
||||||
|
|
||||||
Dataset<String> actionDS =
|
Dataset<String> actionDS =
|
||||||
readActionsFromJsonDump(inputActionSetJsonDumpFile.toString())
|
readActionsFromJsonDump(inputActionSetJsonDumpFile.toString()).cache();
|
||||||
.cache();
|
|
||||||
|
|
||||||
writeActionsAsJobInput(
|
writeActionsAsJobInput(actionDS, inputActionSetId, inputActionSetDir.toString());
|
||||||
actionDS, inputActionSetId, inputActionSetDir.toString());
|
|
||||||
|
|
||||||
Map<String, List<String>> actionSetOafsByType =
|
Map<String, List<String>> actionSetOafsByType =
|
||||||
actionDS
|
actionDS
|
||||||
.withColumn(
|
.withColumn("atomic_action", from_json(col("value"), ATOMIC_ACTION_SCHEMA))
|
||||||
"atomic_action",
|
|
||||||
from_json(col("value"), ATOMIC_ACTION_SCHEMA))
|
|
||||||
.select(expr("atomic_action.*")).groupBy(col("clazz"))
|
.select(expr("atomic_action.*")).groupBy(col("clazz"))
|
||||||
.agg(collect_list(col("payload")).as("payload_list"))
|
.agg(collect_list(col("payload")).as("payload_list")).collectAsList().stream()
|
||||||
.collectAsList().stream()
|
|
||||||
.map(
|
.map(
|
||||||
row ->
|
row ->
|
||||||
new AbstractMap.SimpleEntry<>(
|
new AbstractMap.SimpleEntry<>(
|
||||||
row.<String>getAs("clazz"),
|
row.<String>getAs("clazz"),
|
||||||
mutableSeqAsJavaList(
|
mutableSeqAsJavaList(row.<Seq<String>>getAs("payload_list"))))
|
||||||
row.<Seq<String>>getAs(
|
|
||||||
"payload_list"))))
|
|
||||||
.collect(
|
.collect(
|
||||||
Collectors.toMap(
|
Collectors.toMap(
|
||||||
AbstractMap.SimpleEntry::getKey,
|
AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue));
|
||||||
AbstractMap.SimpleEntry::getValue));
|
|
||||||
|
|
||||||
actionSetOafsByType
|
actionSetOafsByType
|
||||||
.keySet()
|
.keySet()
|
||||||
|
@ -183,8 +173,7 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
||||||
|
|
||||||
private static Path getInputActionSetJsonDumpsDir() {
|
private static Path getInputActionSetJsonDumpsDir() {
|
||||||
return Paths.get(
|
return Paths.get(
|
||||||
Objects.requireNonNull(
|
Objects.requireNonNull(cl.getResource("eu/dnetlib/dhp/actionmanager/partition/input/"))
|
||||||
cl.getResource("eu/dnetlib/dhp/actionmanager/partition/input/"))
|
|
||||||
.getFile());
|
.getFile());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -194,20 +183,16 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
||||||
|
|
||||||
private static void writeActionsAsJobInput(
|
private static void writeActionsAsJobInput(
|
||||||
Dataset<String> actionDS, String inputActionSetId, String path) {
|
Dataset<String> actionDS, String inputActionSetId, String path) {
|
||||||
actionDS.javaRDD()
|
actionDS
|
||||||
|
.javaRDD()
|
||||||
.mapToPair(json -> new Tuple2<>(new Text(inputActionSetId), new Text(json)))
|
.mapToPair(json -> new Tuple2<>(new Text(inputActionSetId), new Text(json)))
|
||||||
.saveAsNewAPIHadoopFile(
|
.saveAsNewAPIHadoopFile(
|
||||||
path,
|
path, Text.class, Text.class, SequenceFileOutputFormat.class, configuration);
|
||||||
Text.class,
|
|
||||||
Text.class,
|
|
||||||
SequenceFileOutputFormat.class,
|
|
||||||
configuration);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static <T extends Oaf> void assertForOafType(
|
private static <T extends Oaf> void assertForOafType(
|
||||||
Path outputDir, Map<String, List<String>> oafsByClassName, Class<T> clazz) {
|
Path outputDir, Map<String, List<String>> oafsByClassName, Class<T> clazz) {
|
||||||
Path outputDatasetDir =
|
Path outputDatasetDir = outputDir.resolve(String.format("clazz=%s", clazz.getCanonicalName()));
|
||||||
outputDir.resolve(String.format("clazz=%s", clazz.getCanonicalName()));
|
|
||||||
Files.exists(outputDatasetDir);
|
Files.exists(outputDatasetDir);
|
||||||
|
|
||||||
List<T> actuals =
|
List<T> actuals =
|
||||||
|
@ -225,13 +210,12 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
||||||
|
|
||||||
private static <T extends Oaf> Dataset<T> readActionPayloadFromJobOutput(
|
private static <T extends Oaf> Dataset<T> readActionPayloadFromJobOutput(
|
||||||
String path, Class<T> clazz) {
|
String path, Class<T> clazz) {
|
||||||
return spark.read()
|
return spark
|
||||||
|
.read()
|
||||||
.parquet(path)
|
.parquet(path)
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<Row, T>)
|
(MapFunction<Row, T>)
|
||||||
value ->
|
value -> OBJECT_MAPPER.readValue(value.<String>getAs("payload"), clazz),
|
||||||
OBJECT_MAPPER.readValue(
|
|
||||||
value.<String>getAs("payload"), clazz),
|
|
||||||
Encoders.bean(clazz));
|
Encoders.bean(clazz));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -239,7 +223,6 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
||||||
return rethrowAsRuntimeException(
|
return rethrowAsRuntimeException(
|
||||||
() -> OBJECT_MAPPER.readValue(json, clazz),
|
() -> OBJECT_MAPPER.readValue(json, clazz),
|
||||||
String.format(
|
String.format(
|
||||||
"failed to map json to class: json=%s, class=%s",
|
"failed to map json to class: json=%s, class=%s", json, clazz.getCanonicalName()));
|
||||||
json, clazz.getCanonicalName()));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -23,8 +23,7 @@ public class MergeAndGetTest {
|
||||||
Oaf b = mock(Oaf.class);
|
Oaf b = mock(Oaf.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||||
|
@ -37,8 +36,7 @@ public class MergeAndGetTest {
|
||||||
Relation b = mock(Relation.class);
|
Relation b = mock(Relation.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||||
|
@ -51,8 +49,7 @@ public class MergeAndGetTest {
|
||||||
OafEntity b = mock(OafEntity.class);
|
OafEntity b = mock(OafEntity.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||||
|
@ -65,8 +62,7 @@ public class MergeAndGetTest {
|
||||||
Oaf b = mock(Oaf.class);
|
Oaf b = mock(Oaf.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||||
|
@ -79,8 +75,7 @@ public class MergeAndGetTest {
|
||||||
OafEntity b = mock(OafEntity.class);
|
OafEntity b = mock(OafEntity.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||||
|
@ -93,8 +88,7 @@ public class MergeAndGetTest {
|
||||||
Relation b = mock(Relation.class);
|
Relation b = mock(Relation.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
Oaf x = fn.get().apply(a, b);
|
Oaf x = fn.get().apply(a, b);
|
||||||
|
@ -110,8 +104,7 @@ public class MergeAndGetTest {
|
||||||
Oaf b = mock(Oaf.class);
|
Oaf b = mock(Oaf.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||||
|
@ -124,8 +117,7 @@ public class MergeAndGetTest {
|
||||||
Relation b = mock(Relation.class);
|
Relation b = mock(Relation.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||||
|
@ -141,8 +133,7 @@ public class MergeAndGetTest {
|
||||||
OafEntitySub2 b = mock(OafEntitySub2.class);
|
OafEntitySub2 b = mock(OafEntitySub2.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||||
|
@ -155,8 +146,7 @@ public class MergeAndGetTest {
|
||||||
OafEntity b = mock(OafEntity.class);
|
OafEntity b = mock(OafEntity.class);
|
||||||
|
|
||||||
// when
|
// when
|
||||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
|
||||||
|
|
||||||
// then
|
// then
|
||||||
Oaf x = fn.get().apply(a, b);
|
Oaf x = fn.get().apply(a, b);
|
||||||
|
|
|
@ -53,8 +53,7 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
||||||
@BeforeEach
|
@BeforeEach
|
||||||
public void beforeEach() throws IOException {
|
public void beforeEach() throws IOException {
|
||||||
workingDir =
|
workingDir =
|
||||||
Files.createTempDirectory(
|
Files.createTempDirectory(PromoteActionPayloadForGraphTableJobTest.class.getSimpleName());
|
||||||
PromoteActionPayloadForGraphTableJobTest.class.getSimpleName());
|
|
||||||
inputDir = workingDir.resolve("input");
|
inputDir = workingDir.resolve("input");
|
||||||
inputGraphRootDir = inputDir.resolve("graph");
|
inputGraphRootDir = inputDir.resolve("graph");
|
||||||
inputActionPayloadRootDir = inputDir.resolve("action_payload");
|
inputActionPayloadRootDir = inputDir.resolve("action_payload");
|
||||||
|
@ -88,16 +87,20 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
||||||
() ->
|
() ->
|
||||||
PromoteActionPayloadForGraphTableJob.main(
|
PromoteActionPayloadForGraphTableJob.main(
|
||||||
new String[] {
|
new String[] {
|
||||||
"-isSparkSessionManaged", Boolean.FALSE.toString(),
|
"-isSparkSessionManaged",
|
||||||
"-inputGraphTablePath", "",
|
Boolean.FALSE.toString(),
|
||||||
"-graphTableClassName", rowClazz.getCanonicalName(),
|
"-inputGraphTablePath",
|
||||||
"-inputActionPayloadPath", "",
|
"",
|
||||||
|
"-graphTableClassName",
|
||||||
|
rowClazz.getCanonicalName(),
|
||||||
|
"-inputActionPayloadPath",
|
||||||
|
"",
|
||||||
"-actionPayloadClassName",
|
"-actionPayloadClassName",
|
||||||
actionPayloadClazz.getCanonicalName(),
|
actionPayloadClazz.getCanonicalName(),
|
||||||
"-outputGraphTablePath", "",
|
"-outputGraphTablePath",
|
||||||
|
"",
|
||||||
"-mergeAndGetStrategy",
|
"-mergeAndGetStrategy",
|
||||||
MergeAndGet.Strategy.SELECT_NEWER_AND_GET
|
MergeAndGet.Strategy.SELECT_NEWER_AND_GET.name()
|
||||||
.name()
|
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// then
|
// then
|
||||||
|
@ -126,33 +129,38 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
||||||
// when
|
// when
|
||||||
PromoteActionPayloadForGraphTableJob.main(
|
PromoteActionPayloadForGraphTableJob.main(
|
||||||
new String[] {
|
new String[] {
|
||||||
"-isSparkSessionManaged", Boolean.FALSE.toString(),
|
"-isSparkSessionManaged",
|
||||||
"-inputGraphTablePath", inputGraphTableDir.toString(),
|
Boolean.FALSE.toString(),
|
||||||
"-graphTableClassName", rowClazz.getCanonicalName(),
|
"-inputGraphTablePath",
|
||||||
"-inputActionPayloadPath", inputActionPayloadDir.toString(),
|
inputGraphTableDir.toString(),
|
||||||
"-actionPayloadClassName", actionPayloadClazz.getCanonicalName(),
|
"-graphTableClassName",
|
||||||
"-outputGraphTablePath", outputGraphTableDir.toString(),
|
rowClazz.getCanonicalName(),
|
||||||
"-mergeAndGetStrategy", strategy.name()
|
"-inputActionPayloadPath",
|
||||||
|
inputActionPayloadDir.toString(),
|
||||||
|
"-actionPayloadClassName",
|
||||||
|
actionPayloadClazz.getCanonicalName(),
|
||||||
|
"-outputGraphTablePath",
|
||||||
|
outputGraphTableDir.toString(),
|
||||||
|
"-mergeAndGetStrategy",
|
||||||
|
strategy.name()
|
||||||
});
|
});
|
||||||
|
|
||||||
// then
|
// then
|
||||||
assertTrue(Files.exists(outputGraphTableDir));
|
assertTrue(Files.exists(outputGraphTableDir));
|
||||||
|
|
||||||
List<? extends Oaf> actualOutputRows =
|
List<? extends Oaf> actualOutputRows =
|
||||||
readGraphTableFromJobOutput(outputGraphTableDir.toString(), rowClazz)
|
readGraphTableFromJobOutput(outputGraphTableDir.toString(), rowClazz).collectAsList()
|
||||||
.collectAsList().stream()
|
.stream()
|
||||||
.sorted(Comparator.comparingInt(Object::hashCode))
|
.sorted(Comparator.comparingInt(Object::hashCode))
|
||||||
.collect(Collectors.toList());
|
.collect(Collectors.toList());
|
||||||
String expectedOutputGraphTableJsonDumpPath =
|
String expectedOutputGraphTableJsonDumpPath =
|
||||||
resultFileLocation(strategy, rowClazz, actionPayloadClazz);
|
resultFileLocation(strategy, rowClazz, actionPayloadClazz);
|
||||||
Path expectedOutputGraphTableJsonDumpFile =
|
Path expectedOutputGraphTableJsonDumpFile =
|
||||||
Paths.get(
|
Paths.get(
|
||||||
Objects.requireNonNull(
|
Objects.requireNonNull(cl.getResource(expectedOutputGraphTableJsonDumpPath))
|
||||||
cl.getResource(expectedOutputGraphTableJsonDumpPath))
|
|
||||||
.getFile());
|
.getFile());
|
||||||
List<? extends Oaf> expectedOutputRows =
|
List<? extends Oaf> expectedOutputRows =
|
||||||
readGraphTableFromJsonDump(
|
readGraphTableFromJsonDump(expectedOutputGraphTableJsonDumpFile.toString(), rowClazz)
|
||||||
expectedOutputGraphTableJsonDumpFile.toString(), rowClazz)
|
|
||||||
.collectAsList().stream()
|
.collectAsList().stream()
|
||||||
.sorted(Comparator.comparingInt(Object::hashCode))
|
.sorted(Comparator.comparingInt(Object::hashCode))
|
||||||
.collect(Collectors.toList());
|
.collect(Collectors.toList());
|
||||||
|
@ -170,42 +178,27 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
||||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
||||||
eu.dnetlib.dhp.schema.oaf.Dataset.class,
|
eu.dnetlib.dhp.schema.oaf.Dataset.class,
|
||||||
eu.dnetlib.dhp.schema.oaf.Result.class),
|
eu.dnetlib.dhp.schema.oaf.Result.class),
|
||||||
arguments(
|
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Datasource.class, Datasource.class),
|
||||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Organization.class, Organization.class),
|
||||||
Datasource.class,
|
|
||||||
Datasource.class),
|
|
||||||
arguments(
|
|
||||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
|
||||||
Organization.class,
|
|
||||||
Organization.class),
|
|
||||||
arguments(
|
arguments(
|
||||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
||||||
OtherResearchProduct.class,
|
OtherResearchProduct.class,
|
||||||
OtherResearchProduct.class),
|
OtherResearchProduct.class),
|
||||||
arguments(
|
arguments(
|
||||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
MergeAndGet.Strategy.MERGE_FROM_AND_GET, OtherResearchProduct.class, Result.class),
|
||||||
OtherResearchProduct.class,
|
|
||||||
Result.class),
|
|
||||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Project.class, Project.class),
|
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Project.class, Project.class),
|
||||||
arguments(
|
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Publication.class, Publication.class),
|
||||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
|
||||||
Publication.class,
|
|
||||||
Publication.class),
|
|
||||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Publication.class, Result.class),
|
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Publication.class, Result.class),
|
||||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Relation.class, Relation.class),
|
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Relation.class, Relation.class),
|
||||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Software.class, Software.class),
|
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Software.class, Software.class),
|
||||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Software.class, Result.class));
|
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Software.class, Result.class));
|
||||||
}
|
}
|
||||||
|
|
||||||
private static <G extends Oaf> Path createGraphTable(
|
private static <G extends Oaf> Path createGraphTable(Path inputGraphRootDir, Class<G> rowClazz) {
|
||||||
Path inputGraphRootDir, Class<G> rowClazz) {
|
|
||||||
String inputGraphTableJsonDumpPath = inputGraphTableJsonDumpLocation(rowClazz);
|
String inputGraphTableJsonDumpPath = inputGraphTableJsonDumpLocation(rowClazz);
|
||||||
Path inputGraphTableJsonDumpFile =
|
Path inputGraphTableJsonDumpFile =
|
||||||
Paths.get(
|
Paths.get(Objects.requireNonNull(cl.getResource(inputGraphTableJsonDumpPath)).getFile());
|
||||||
Objects.requireNonNull(cl.getResource(inputGraphTableJsonDumpPath))
|
Dataset<G> rowDS = readGraphTableFromJsonDump(inputGraphTableJsonDumpFile.toString(), rowClazz);
|
||||||
.getFile());
|
|
||||||
Dataset<G> rowDS =
|
|
||||||
readGraphTableFromJsonDump(inputGraphTableJsonDumpFile.toString(), rowClazz);
|
|
||||||
String inputGraphTableName = rowClazz.getSimpleName().toLowerCase();
|
String inputGraphTableName = rowClazz.getSimpleName().toLowerCase();
|
||||||
Path inputGraphTableDir = inputGraphRootDir.resolve(inputGraphTableName);
|
Path inputGraphTableDir = inputGraphRootDir.resolve(inputGraphTableName);
|
||||||
writeGraphTableAaJobInput(rowDS, inputGraphTableDir.toString());
|
writeGraphTableAaJobInput(rowDS, inputGraphTableDir.toString());
|
||||||
|
@ -215,13 +208,13 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
||||||
private static String inputGraphTableJsonDumpLocation(Class<? extends Oaf> rowClazz) {
|
private static String inputGraphTableJsonDumpLocation(Class<? extends Oaf> rowClazz) {
|
||||||
return String.format(
|
return String.format(
|
||||||
"%s/%s.json",
|
"%s/%s.json",
|
||||||
"eu/dnetlib/dhp/actionmanager/promote/input/graph",
|
"eu/dnetlib/dhp/actionmanager/promote/input/graph", rowClazz.getSimpleName().toLowerCase());
|
||||||
rowClazz.getSimpleName().toLowerCase());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static <G extends Oaf> Dataset<G> readGraphTableFromJsonDump(
|
private static <G extends Oaf> Dataset<G> readGraphTableFromJsonDump(
|
||||||
String path, Class<G> rowClazz) {
|
String path, Class<G> rowClazz) {
|
||||||
return spark.read()
|
return spark
|
||||||
|
.read()
|
||||||
.textFile(path)
|
.textFile(path)
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<String, G>) json -> OBJECT_MAPPER.readValue(json, rowClazz),
|
(MapFunction<String, G>) json -> OBJECT_MAPPER.readValue(json, rowClazz),
|
||||||
|
@ -237,9 +230,7 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
||||||
String inputActionPayloadJsonDumpPath =
|
String inputActionPayloadJsonDumpPath =
|
||||||
inputActionPayloadJsonDumpLocation(rowClazz, actionPayloadClazz);
|
inputActionPayloadJsonDumpLocation(rowClazz, actionPayloadClazz);
|
||||||
Path inputActionPayloadJsonDumpFile =
|
Path inputActionPayloadJsonDumpFile =
|
||||||
Paths.get(
|
Paths.get(Objects.requireNonNull(cl.getResource(inputActionPayloadJsonDumpPath)).getFile());
|
||||||
Objects.requireNonNull(cl.getResource(inputActionPayloadJsonDumpPath))
|
|
||||||
.getFile());
|
|
||||||
Dataset<String> actionPayloadDS =
|
Dataset<String> actionPayloadDS =
|
||||||
readActionPayloadFromJsonDump(inputActionPayloadJsonDumpFile.toString());
|
readActionPayloadFromJsonDump(inputActionPayloadJsonDumpFile.toString());
|
||||||
Path inputActionPayloadDir =
|
Path inputActionPayloadDir =
|
||||||
|
@ -253,8 +244,7 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
||||||
|
|
||||||
return String.format(
|
return String.format(
|
||||||
"eu/dnetlib/dhp/actionmanager/promote/input/action_payload/%s_table/%s.json",
|
"eu/dnetlib/dhp/actionmanager/promote/input/action_payload/%s_table/%s.json",
|
||||||
rowClazz.getSimpleName().toLowerCase(),
|
rowClazz.getSimpleName().toLowerCase(), actionPayloadClazz.getSimpleName().toLowerCase());
|
||||||
actionPayloadClazz.getSimpleName().toLowerCase());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Dataset<String> readActionPayloadFromJsonDump(String path) {
|
private static Dataset<String> readActionPayloadFromJsonDump(String path) {
|
||||||
|
@ -267,7 +257,8 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
||||||
|
|
||||||
private static <G extends Oaf> Dataset<G> readGraphTableFromJobOutput(
|
private static <G extends Oaf> Dataset<G> readGraphTableFromJobOutput(
|
||||||
String path, Class<G> rowClazz) {
|
String path, Class<G> rowClazz) {
|
||||||
return spark.read()
|
return spark
|
||||||
|
.read()
|
||||||
.textFile(path)
|
.textFile(path)
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<String, G>) json -> OBJECT_MAPPER.readValue(json, rowClazz),
|
(MapFunction<String, G>) json -> OBJECT_MAPPER.readValue(json, rowClazz),
|
||||||
|
|
|
@ -50,13 +50,7 @@ public class PromoteActionPayloadFunctionsTest {
|
||||||
RuntimeException.class,
|
RuntimeException.class,
|
||||||
() ->
|
() ->
|
||||||
PromoteActionPayloadFunctions.joinGraphTableWithActionPayloadAndMerge(
|
PromoteActionPayloadFunctions.joinGraphTableWithActionPayloadAndMerge(
|
||||||
null,
|
null, null, null, null, null, OafImplSubSub.class, OafImpl.class));
|
||||||
null,
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
OafImplSubSub.class,
|
|
||||||
OafImpl.class));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -91,12 +85,10 @@ public class PromoteActionPayloadFunctionsTest {
|
||||||
Dataset<OafImplSubSub> actionPayloadDS =
|
Dataset<OafImplSubSub> actionPayloadDS =
|
||||||
spark.createDataset(actionPayloadData, Encoders.bean(OafImplSubSub.class));
|
spark.createDataset(actionPayloadData, Encoders.bean(OafImplSubSub.class));
|
||||||
|
|
||||||
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn =
|
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn = () -> OafImplRoot::getId;
|
||||||
() -> OafImplRoot::getId;
|
|
||||||
SerializableSupplier<Function<OafImplSubSub, String>> actionPayloadIdFn =
|
SerializableSupplier<Function<OafImplSubSub, String>> actionPayloadIdFn =
|
||||||
() -> OafImplRoot::getId;
|
() -> OafImplRoot::getId;
|
||||||
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>>
|
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>> mergeAndGetFn =
|
||||||
mergeAndGetFn =
|
|
||||||
() ->
|
() ->
|
||||||
(x, y) -> {
|
(x, y) -> {
|
||||||
x.merge(y);
|
x.merge(y);
|
||||||
|
@ -175,12 +167,10 @@ public class PromoteActionPayloadFunctionsTest {
|
||||||
Dataset<OafImplSub> actionPayloadDS =
|
Dataset<OafImplSub> actionPayloadDS =
|
||||||
spark.createDataset(actionPayloadData, Encoders.bean(OafImplSub.class));
|
spark.createDataset(actionPayloadData, Encoders.bean(OafImplSub.class));
|
||||||
|
|
||||||
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn =
|
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn = () -> OafImplRoot::getId;
|
||||||
() -> OafImplRoot::getId;
|
|
||||||
SerializableSupplier<Function<OafImplSub, String>> actionPayloadIdFn =
|
SerializableSupplier<Function<OafImplSub, String>> actionPayloadIdFn =
|
||||||
() -> OafImplRoot::getId;
|
() -> OafImplRoot::getId;
|
||||||
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSub, OafImplSubSub>>
|
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSub, OafImplSubSub>> mergeAndGetFn =
|
||||||
mergeAndGetFn =
|
|
||||||
() ->
|
() ->
|
||||||
(x, y) -> {
|
(x, y) -> {
|
||||||
x.merge(y);
|
x.merge(y);
|
||||||
|
@ -245,10 +235,8 @@ public class PromoteActionPayloadFunctionsTest {
|
||||||
Dataset<OafImplSubSub> rowDS =
|
Dataset<OafImplSubSub> rowDS =
|
||||||
spark.createDataset(rowData, Encoders.bean(OafImplSubSub.class));
|
spark.createDataset(rowData, Encoders.bean(OafImplSubSub.class));
|
||||||
|
|
||||||
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn =
|
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn = () -> OafImplRoot::getId;
|
||||||
() -> OafImplRoot::getId;
|
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>> mergeAndGetFn =
|
||||||
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>>
|
|
||||||
mergeAndGetFn =
|
|
||||||
() ->
|
() ->
|
||||||
(x, y) -> {
|
(x, y) -> {
|
||||||
x.merge(y);
|
x.merge(y);
|
||||||
|
@ -261,12 +249,7 @@ public class PromoteActionPayloadFunctionsTest {
|
||||||
// when
|
// when
|
||||||
List<OafImplSubSub> results =
|
List<OafImplSubSub> results =
|
||||||
PromoteActionPayloadFunctions.groupGraphTableByIdAndMerge(
|
PromoteActionPayloadFunctions.groupGraphTableByIdAndMerge(
|
||||||
rowDS,
|
rowDS, rowIdFn, mergeAndGetFn, zeroFn, isNotZeroFn, OafImplSubSub.class)
|
||||||
rowIdFn,
|
|
||||||
mergeAndGetFn,
|
|
||||||
zeroFn,
|
|
||||||
isNotZeroFn,
|
|
||||||
OafImplSubSub.class)
|
|
||||||
.collectAsList();
|
.collectAsList();
|
||||||
|
|
||||||
// then
|
// then
|
||||||
|
|
|
@ -51,8 +51,7 @@ public class GenerateNativeStoreSparkJob {
|
||||||
if (invalidRecords != null) invalidRecords.add(1);
|
if (invalidRecords != null) invalidRecords.add(1);
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
return new MetadataRecord(
|
return new MetadataRecord(originalIdentifier, encoding, provenance, input, dateOfCollection);
|
||||||
originalIdentifier, encoding, provenance, input, dateOfCollection);
|
|
||||||
} catch (Throwable e) {
|
} catch (Throwable e) {
|
||||||
if (invalidRecords != null) invalidRecords.add(1);
|
if (invalidRecords != null) invalidRecords.add(1);
|
||||||
e.printStackTrace();
|
e.printStackTrace();
|
||||||
|
@ -69,8 +68,7 @@ public class GenerateNativeStoreSparkJob {
|
||||||
"/eu/dnetlib/dhp/collection/collection_input_parameters.json")));
|
"/eu/dnetlib/dhp/collection/collection_input_parameters.json")));
|
||||||
parser.parseArgument(args);
|
parser.parseArgument(args);
|
||||||
final ObjectMapper jsonMapper = new ObjectMapper();
|
final ObjectMapper jsonMapper = new ObjectMapper();
|
||||||
final Provenance provenance =
|
final Provenance provenance = jsonMapper.readValue(parser.get("provenance"), Provenance.class);
|
||||||
jsonMapper.readValue(parser.get("provenance"), Provenance.class);
|
|
||||||
final long dateOfCollection = new Long(parser.get("dateOfCollection"));
|
final long dateOfCollection = new Long(parser.get("dateOfCollection"));
|
||||||
|
|
||||||
final SparkSession spark =
|
final SparkSession spark =
|
||||||
|
@ -104,7 +102,8 @@ public class GenerateNativeStoreSparkJob {
|
||||||
null);
|
null);
|
||||||
|
|
||||||
final JavaRDD<MetadataRecord> mappeRDD =
|
final JavaRDD<MetadataRecord> mappeRDD =
|
||||||
inputRDD.map(
|
inputRDD
|
||||||
|
.map(
|
||||||
item ->
|
item ->
|
||||||
parseRecord(
|
parseRecord(
|
||||||
item._2().toString(),
|
item._2().toString(),
|
||||||
|
@ -121,10 +120,7 @@ public class GenerateNativeStoreSparkJob {
|
||||||
if (!test) {
|
if (!test) {
|
||||||
manager.sendMessage(
|
manager.sendMessage(
|
||||||
new Message(
|
new Message(
|
||||||
parser.get("workflowId"),
|
parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap),
|
||||||
"DataFrameCreation",
|
|
||||||
MessageType.ONGOING,
|
|
||||||
ongoingMap),
|
|
||||||
parser.get("rabbitOngoingQueue"),
|
parser.get("rabbitOngoingQueue"),
|
||||||
true,
|
true,
|
||||||
false);
|
false);
|
||||||
|
@ -138,10 +134,7 @@ public class GenerateNativeStoreSparkJob {
|
||||||
if (!test) {
|
if (!test) {
|
||||||
manager.sendMessage(
|
manager.sendMessage(
|
||||||
new Message(
|
new Message(
|
||||||
parser.get("workflowId"),
|
parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap),
|
||||||
"DataFrameCreation",
|
|
||||||
MessageType.ONGOING,
|
|
||||||
ongoingMap),
|
|
||||||
parser.get("rabbitOngoingQueue"),
|
parser.get("rabbitOngoingQueue"),
|
||||||
true,
|
true,
|
||||||
false);
|
false);
|
||||||
|
@ -152,8 +145,7 @@ public class GenerateNativeStoreSparkJob {
|
||||||
reportMap.put("mdStoreSize", "" + mdStoreRecords.value());
|
reportMap.put("mdStoreSize", "" + mdStoreRecords.value());
|
||||||
if (!test) {
|
if (!test) {
|
||||||
manager.sendMessage(
|
manager.sendMessage(
|
||||||
new Message(
|
new Message(parser.get("workflowId"), "Collection", MessageType.REPORT, reportMap),
|
||||||
parser.get("workflowId"), "Collection", MessageType.REPORT, reportMap),
|
|
||||||
parser.get("rabbitReportQueue"),
|
parser.get("rabbitReportQueue"),
|
||||||
true,
|
true,
|
||||||
false);
|
false);
|
||||||
|
|
|
@ -34,8 +34,7 @@ public class OaiCollectorPlugin implements CollectorPlugin {
|
||||||
final List<String> sets = new ArrayList<>();
|
final List<String> sets = new ArrayList<>();
|
||||||
if (setParam != null) {
|
if (setParam != null) {
|
||||||
sets.addAll(
|
sets.addAll(
|
||||||
Lists.newArrayList(
|
Lists.newArrayList(Splitter.on(",").omitEmptyStrings().trimResults().split(setParam)));
|
||||||
Splitter.on(",").omitEmptyStrings().trimResults().split(setParam)));
|
|
||||||
}
|
}
|
||||||
if (sets.isEmpty()) {
|
if (sets.isEmpty()) {
|
||||||
// If no set is defined, ALL the sets must be harvested
|
// If no set is defined, ALL the sets must be harvested
|
||||||
|
@ -63,14 +62,11 @@ public class OaiCollectorPlugin implements CollectorPlugin {
|
||||||
.map(
|
.map(
|
||||||
set ->
|
set ->
|
||||||
getOaiIteratorFactory()
|
getOaiIteratorFactory()
|
||||||
.newIterator(
|
.newIterator(baseUrl, mdFormat, set, fromDate, untilDate))
|
||||||
baseUrl, mdFormat, set, fromDate,
|
|
||||||
untilDate))
|
|
||||||
.iterator();
|
.iterator();
|
||||||
|
|
||||||
return StreamSupport.stream(
|
return StreamSupport.stream(
|
||||||
Spliterators.spliteratorUnknownSize(Iterators.concat(iters), Spliterator.ORDERED),
|
Spliterators.spliteratorUnknownSize(Iterators.concat(iters), Spliterator.ORDERED), false);
|
||||||
false);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public OaiIteratorFactory getOaiIteratorFactory() {
|
public OaiIteratorFactory getOaiIteratorFactory() {
|
||||||
|
|
|
@ -19,8 +19,8 @@ import org.dom4j.io.SAXReader;
|
||||||
|
|
||||||
public class OaiIterator implements Iterator<String> {
|
public class OaiIterator implements Iterator<String> {
|
||||||
|
|
||||||
private static final Log log =
|
private static final Log log = LogFactory.getLog(OaiIterator.class); // NOPMD by marko on
|
||||||
LogFactory.getLog(OaiIterator.class); // NOPMD by marko on 11/24/08 5:02 PM
|
// 11/24/08 5:02 PM
|
||||||
|
|
||||||
private final Queue<String> queue = new PriorityBlockingQueue<>();
|
private final Queue<String> queue = new PriorityBlockingQueue<>();
|
||||||
private final SAXReader reader = new SAXReader();
|
private final SAXReader reader = new SAXReader();
|
||||||
|
@ -91,9 +91,7 @@ public class OaiIterator implements Iterator<String> {
|
||||||
private String firstPage() throws DnetCollectorException {
|
private String firstPage() throws DnetCollectorException {
|
||||||
try {
|
try {
|
||||||
String url =
|
String url =
|
||||||
baseUrl
|
baseUrl + "?verb=ListRecords&metadataPrefix=" + URLEncoder.encode(mdFormat, "UTF-8");
|
||||||
+ "?verb=ListRecords&metadataPrefix="
|
|
||||||
+ URLEncoder.encode(mdFormat, "UTF-8");
|
|
||||||
if (set != null && !set.isEmpty()) {
|
if (set != null && !set.isEmpty()) {
|
||||||
url += "&set=" + URLEncoder.encode(set, "UTF-8");
|
url += "&set=" + URLEncoder.encode(set, "UTF-8");
|
||||||
}
|
}
|
||||||
|
@ -150,8 +148,7 @@ public class OaiIterator implements Iterator<String> {
|
||||||
} catch (final DocumentException e1) {
|
} catch (final DocumentException e1) {
|
||||||
final String resumptionToken = extractResumptionToken(xml);
|
final String resumptionToken = extractResumptionToken(xml);
|
||||||
if (resumptionToken == null) {
|
if (resumptionToken == null) {
|
||||||
throw new DnetCollectorException(
|
throw new DnetCollectorException("Error parsing cleaned document:" + cleaned, e1);
|
||||||
"Error parsing cleaned document:" + cleaned, e1);
|
|
||||||
}
|
}
|
||||||
return resumptionToken;
|
return resumptionToken;
|
||||||
}
|
}
|
||||||
|
|
|
@ -48,8 +48,7 @@ public class DnetCollectorWorker {
|
||||||
final ApiDescriptor api =
|
final ApiDescriptor api =
|
||||||
jsonMapper.readValue(argumentParser.get("apidescriptor"), ApiDescriptor.class);
|
jsonMapper.readValue(argumentParser.get("apidescriptor"), ApiDescriptor.class);
|
||||||
|
|
||||||
final CollectorPlugin plugin =
|
final CollectorPlugin plugin = collectorPluginFactory.getPluginByProtocol(api.getProtocol());
|
||||||
collectorPluginFactory.getPluginByProtocol(api.getProtocol());
|
|
||||||
|
|
||||||
final String hdfsuri = argumentParser.get("namenode");
|
final String hdfsuri = argumentParser.get("namenode");
|
||||||
|
|
||||||
|
@ -80,7 +79,8 @@ public class DnetCollectorWorker {
|
||||||
SequenceFile.Writer.valueClass(Text.class))) {
|
SequenceFile.Writer.valueClass(Text.class))) {
|
||||||
final IntWritable key = new IntWritable(counter.get());
|
final IntWritable key = new IntWritable(counter.get());
|
||||||
final Text value = new Text();
|
final Text value = new Text();
|
||||||
plugin.collect(api)
|
plugin
|
||||||
|
.collect(api)
|
||||||
.forEach(
|
.forEach(
|
||||||
content -> {
|
content -> {
|
||||||
key.set(counter.getAndIncrement());
|
key.set(counter.getAndIncrement());
|
||||||
|
@ -92,13 +92,11 @@ public class DnetCollectorWorker {
|
||||||
"Sending message: "
|
"Sending message: "
|
||||||
+ manager.sendMessage(
|
+ manager.sendMessage(
|
||||||
new Message(
|
new Message(
|
||||||
argumentParser.get(
|
argumentParser.get("workflowId"),
|
||||||
"workflowId"),
|
|
||||||
"Collection",
|
"Collection",
|
||||||
MessageType.ONGOING,
|
MessageType.ONGOING,
|
||||||
ongoingMap),
|
ongoingMap),
|
||||||
argumentParser.get(
|
argumentParser.get("rabbitOngoingQueue"),
|
||||||
"rabbitOngoingQueue"),
|
|
||||||
true,
|
true,
|
||||||
false));
|
false));
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
|
@ -115,20 +113,14 @@ public class DnetCollectorWorker {
|
||||||
ongoingMap.put("ongoing", "" + counter.get());
|
ongoingMap.put("ongoing", "" + counter.get());
|
||||||
manager.sendMessage(
|
manager.sendMessage(
|
||||||
new Message(
|
new Message(
|
||||||
argumentParser.get("workflowId"),
|
argumentParser.get("workflowId"), "Collection", MessageType.ONGOING, ongoingMap),
|
||||||
"Collection",
|
|
||||||
MessageType.ONGOING,
|
|
||||||
ongoingMap),
|
|
||||||
argumentParser.get("rabbitOngoingQueue"),
|
argumentParser.get("rabbitOngoingQueue"),
|
||||||
true,
|
true,
|
||||||
false);
|
false);
|
||||||
reportMap.put("collected", "" + counter.get());
|
reportMap.put("collected", "" + counter.get());
|
||||||
manager.sendMessage(
|
manager.sendMessage(
|
||||||
new Message(
|
new Message(
|
||||||
argumentParser.get("workflowId"),
|
argumentParser.get("workflowId"), "Collection", MessageType.REPORT, reportMap),
|
||||||
"Collection",
|
|
||||||
MessageType.REPORT,
|
|
||||||
reportMap),
|
|
||||||
argumentParser.get("rabbitOngoingQueue"),
|
argumentParser.get("rabbitOngoingQueue"),
|
||||||
true,
|
true,
|
||||||
false);
|
false);
|
||||||
|
|
|
@ -6,8 +6,7 @@ import eu.dnetlib.dhp.collection.worker.DnetCollectorException;
|
||||||
|
|
||||||
public class CollectorPluginFactory {
|
public class CollectorPluginFactory {
|
||||||
|
|
||||||
public CollectorPlugin getPluginByProtocol(final String protocol)
|
public CollectorPlugin getPluginByProtocol(final String protocol) throws DnetCollectorException {
|
||||||
throws DnetCollectorException {
|
|
||||||
if (protocol == null) throw new DnetCollectorException("protocol cannot be null");
|
if (protocol == null) throw new DnetCollectorException("protocol cannot be null");
|
||||||
switch (protocol.toLowerCase().trim()) {
|
switch (protocol.toLowerCase().trim()) {
|
||||||
case "oai":
|
case "oai":
|
||||||
|
|
|
@ -51,15 +51,12 @@ public class HttpConnector {
|
||||||
* @return the content of the downloaded resource as InputStream
|
* @return the content of the downloaded resource as InputStream
|
||||||
* @throws DnetCollectorException when retrying more than maxNumberOfRetry times
|
* @throws DnetCollectorException when retrying more than maxNumberOfRetry times
|
||||||
*/
|
*/
|
||||||
public InputStream getInputSourceAsStream(final String requestUrl)
|
public InputStream getInputSourceAsStream(final String requestUrl) throws DnetCollectorException {
|
||||||
throws DnetCollectorException {
|
|
||||||
return attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList());
|
return attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList());
|
||||||
}
|
}
|
||||||
|
|
||||||
private String attemptDownlaodAsString(
|
private String attemptDownlaodAsString(
|
||||||
final String requestUrl,
|
final String requestUrl, final int retryNumber, final CollectorPluginErrorLogList errorList)
|
||||||
final int retryNumber,
|
|
||||||
final CollectorPluginErrorLogList errorList)
|
|
||||||
throws DnetCollectorException {
|
throws DnetCollectorException {
|
||||||
try {
|
try {
|
||||||
final InputStream s = attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList());
|
final InputStream s = attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList());
|
||||||
|
@ -79,14 +76,11 @@ public class HttpConnector {
|
||||||
}
|
}
|
||||||
|
|
||||||
private InputStream attemptDownload(
|
private InputStream attemptDownload(
|
||||||
final String requestUrl,
|
final String requestUrl, final int retryNumber, final CollectorPluginErrorLogList errorList)
|
||||||
final int retryNumber,
|
|
||||||
final CollectorPluginErrorLogList errorList)
|
|
||||||
throws DnetCollectorException {
|
throws DnetCollectorException {
|
||||||
|
|
||||||
if (retryNumber > maxNumberOfRetry) {
|
if (retryNumber > maxNumberOfRetry) {
|
||||||
throw new DnetCollectorException(
|
throw new DnetCollectorException("Max number of retries exceeded. Cause: \n " + errorList);
|
||||||
"Max number of retries exceeded. Cause: \n " + errorList);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
log.debug("Downloading " + requestUrl + " - try: " + retryNumber);
|
log.debug("Downloading " + requestUrl + " - try: " + retryNumber);
|
||||||
|
@ -94,8 +88,7 @@ public class HttpConnector {
|
||||||
InputStream input = null;
|
InputStream input = null;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
final HttpURLConnection urlConn =
|
final HttpURLConnection urlConn = (HttpURLConnection) new URL(requestUrl).openConnection();
|
||||||
(HttpURLConnection) new URL(requestUrl).openConnection();
|
|
||||||
urlConn.setInstanceFollowRedirects(false);
|
urlConn.setInstanceFollowRedirects(false);
|
||||||
urlConn.setReadTimeout(readTimeOut * 1000);
|
urlConn.setReadTimeout(readTimeOut * 1000);
|
||||||
urlConn.addRequestProperty("User-Agent", userAgent);
|
urlConn.addRequestProperty("User-Agent", userAgent);
|
||||||
|
@ -105,8 +98,7 @@ public class HttpConnector {
|
||||||
}
|
}
|
||||||
|
|
||||||
final int retryAfter = obtainRetryAfter(urlConn.getHeaderFields());
|
final int retryAfter = obtainRetryAfter(urlConn.getHeaderFields());
|
||||||
if (retryAfter > 0
|
if (retryAfter > 0 && urlConn.getResponseCode() == HttpURLConnection.HTTP_UNAVAILABLE) {
|
||||||
&& urlConn.getResponseCode() == HttpURLConnection.HTTP_UNAVAILABLE) {
|
|
||||||
log.warn("waiting and repeating request after " + retryAfter + " sec.");
|
log.warn("waiting and repeating request after " + retryAfter + " sec.");
|
||||||
Thread.sleep(retryAfter * 1000);
|
Thread.sleep(retryAfter * 1000);
|
||||||
errorList.add("503 Service Unavailable");
|
errorList.add("503 Service Unavailable");
|
||||||
|
@ -119,21 +111,16 @@ public class HttpConnector {
|
||||||
errorList.add(
|
errorList.add(
|
||||||
String.format(
|
String.format(
|
||||||
"%s %s. Moved to: %s",
|
"%s %s. Moved to: %s",
|
||||||
urlConn.getResponseCode(),
|
urlConn.getResponseCode(), urlConn.getResponseMessage(), newUrl));
|
||||||
urlConn.getResponseMessage(),
|
|
||||||
newUrl));
|
|
||||||
urlConn.disconnect();
|
urlConn.disconnect();
|
||||||
return attemptDownload(newUrl, retryNumber + 1, errorList);
|
return attemptDownload(newUrl, retryNumber + 1, errorList);
|
||||||
} else if (urlConn.getResponseCode() != HttpURLConnection.HTTP_OK) {
|
} else if (urlConn.getResponseCode() != HttpURLConnection.HTTP_OK) {
|
||||||
log.error(
|
log.error(
|
||||||
String.format(
|
String.format(
|
||||||
"HTTP error: %s %s",
|
"HTTP error: %s %s", urlConn.getResponseCode(), urlConn.getResponseMessage()));
|
||||||
urlConn.getResponseCode(), urlConn.getResponseMessage()));
|
|
||||||
Thread.sleep(defaultDelay * 1000);
|
Thread.sleep(defaultDelay * 1000);
|
||||||
errorList.add(
|
errorList.add(
|
||||||
String.format(
|
String.format("%s %s", urlConn.getResponseCode(), urlConn.getResponseMessage()));
|
||||||
"%s %s",
|
|
||||||
urlConn.getResponseCode(), urlConn.getResponseMessage()));
|
|
||||||
urlConn.disconnect();
|
urlConn.disconnect();
|
||||||
return attemptDownload(requestUrl, retryNumber + 1, errorList);
|
return attemptDownload(requestUrl, retryNumber + 1, errorList);
|
||||||
} else {
|
} else {
|
||||||
|
@ -179,9 +166,7 @@ public class HttpConnector {
|
||||||
private String obtainNewLocation(final Map<String, List<String>> headerMap)
|
private String obtainNewLocation(final Map<String, List<String>> headerMap)
|
||||||
throws DnetCollectorException {
|
throws DnetCollectorException {
|
||||||
for (final String key : headerMap.keySet()) {
|
for (final String key : headerMap.keySet()) {
|
||||||
if (key != null
|
if (key != null && key.toLowerCase().equals("location") && headerMap.get(key).size() > 0) {
|
||||||
&& key.toLowerCase().equals("location")
|
|
||||||
&& headerMap.get(key).size() > 0) {
|
|
||||||
return headerMap.get(key).get(0);
|
return headerMap.get(key).get(0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -198,12 +183,10 @@ public class HttpConnector {
|
||||||
new X509TrustManager() {
|
new X509TrustManager() {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void checkClientTrusted(
|
public void checkClientTrusted(final X509Certificate[] xcs, final String string) {}
|
||||||
final X509Certificate[] xcs, final String string) {}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void checkServerTrusted(
|
public void checkServerTrusted(final X509Certificate[] xcs, final String string) {}
|
||||||
final X509Certificate[] xcs, final String string) {}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public X509Certificate[] getAcceptedIssuers() {
|
public X509Certificate[] getAcceptedIssuers() {
|
||||||
|
|
|
@ -19,8 +19,8 @@ public class XmlCleaner {
|
||||||
private static Pattern invalidControlCharPattern = Pattern.compile("&#x?1[0-9a-fA-F];");
|
private static Pattern invalidControlCharPattern = Pattern.compile("&#x?1[0-9a-fA-F];");
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Pattern that negates the allowable XML 4 byte unicode characters. Valid are: #x9 | #xA | #xD
|
* Pattern that negates the allowable XML 4 byte unicode characters. Valid are: #x9 | #xA | #xD |
|
||||||
* | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
|
* [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
|
||||||
*/
|
*/
|
||||||
private static Pattern invalidCharacterPattern =
|
private static Pattern invalidCharacterPattern =
|
||||||
Pattern.compile("[^\t\r\n\u0020-\uD7FF\uE000-\uFFFD]"); // $NON-NLS-1$
|
Pattern.compile("[^\t\r\n\u0020-\uD7FF\uE000-\uFFFD]"); // $NON-NLS-1$
|
||||||
|
@ -72,14 +72,14 @@ public class XmlCleaner {
|
||||||
badEntities.put("Ÿ", " "); // $NON-NLS-1$ //$NON-NLS-2$ // illegal HTML character
|
badEntities.put("Ÿ", " "); // $NON-NLS-1$ //$NON-NLS-2$ // illegal HTML character
|
||||||
// misc entities
|
// misc entities
|
||||||
badEntities.put("€", "\u20AC"); // $NON-NLS-1$ //$NON-NLS-2$ // euro
|
badEntities.put("€", "\u20AC"); // $NON-NLS-1$ //$NON-NLS-2$ // euro
|
||||||
badEntities.put(
|
badEntities.put("‘", "\u2018"); // $NON-NLS-1$ //$NON-NLS-2$ // left single quotation
|
||||||
"‘", "\u2018"); // $NON-NLS-1$ //$NON-NLS-2$ // left single quotation mark
|
// mark
|
||||||
badEntities.put(
|
badEntities.put("’", "\u2019"); // $NON-NLS-1$ //$NON-NLS-2$ // right single quotation
|
||||||
"’", "\u2019"); // $NON-NLS-1$ //$NON-NLS-2$ // right single quotation mark
|
// mark
|
||||||
// Latin 1 entities
|
// Latin 1 entities
|
||||||
badEntities.put(" ", "\u00A0"); // $NON-NLS-1$ //$NON-NLS-2$ // no-break space
|
badEntities.put(" ", "\u00A0"); // $NON-NLS-1$ //$NON-NLS-2$ // no-break space
|
||||||
badEntities.put(
|
badEntities.put("¡", "\u00A1"); // $NON-NLS-1$ //$NON-NLS-2$ // inverted exclamation
|
||||||
"¡", "\u00A1"); // $NON-NLS-1$ //$NON-NLS-2$ // inverted exclamation mark
|
// mark
|
||||||
badEntities.put("¢", "\u00A2"); // $NON-NLS-1$ //$NON-NLS-2$ // cent sign
|
badEntities.put("¢", "\u00A2"); // $NON-NLS-1$ //$NON-NLS-2$ // cent sign
|
||||||
badEntities.put("£", "\u00A3"); // $NON-NLS-1$ //$NON-NLS-2$ // pound sign
|
badEntities.put("£", "\u00A3"); // $NON-NLS-1$ //$NON-NLS-2$ // pound sign
|
||||||
badEntities.put("¤", "\u00A4"); // $NON-NLS-1$ //$NON-NLS-2$ // currency sign
|
badEntities.put("¤", "\u00A4"); // $NON-NLS-1$ //$NON-NLS-2$ // currency sign
|
||||||
|
@ -88,11 +88,10 @@ public class XmlCleaner {
|
||||||
badEntities.put("§", "\u00A7"); // $NON-NLS-1$ //$NON-NLS-2$ // section sign
|
badEntities.put("§", "\u00A7"); // $NON-NLS-1$ //$NON-NLS-2$ // section sign
|
||||||
badEntities.put("¨", "\u00A8"); // $NON-NLS-1$ //$NON-NLS-2$ // diaeresis
|
badEntities.put("¨", "\u00A8"); // $NON-NLS-1$ //$NON-NLS-2$ // diaeresis
|
||||||
badEntities.put("©", "\u00A9"); // $NON-NLS-1$ //$NON-NLS-2$ // copyright sign
|
badEntities.put("©", "\u00A9"); // $NON-NLS-1$ //$NON-NLS-2$ // copyright sign
|
||||||
badEntities.put(
|
badEntities.put("ª", "\u00AA"); // $NON-NLS-1$ //$NON-NLS-2$ // feminine ordinal
|
||||||
"ª", "\u00AA"); // $NON-NLS-1$ //$NON-NLS-2$ // feminine ordinal indicator
|
// indicator
|
||||||
badEntities.put(
|
badEntities.put("«", "\u00AB"); // $NON-NLS-2$ // left-pointing double angle
|
||||||
"«",
|
// quotation mark
|
||||||
"\u00AB"); //$NON-NLS-1$ //$NON-NLS-2$ // left-pointing double angle quotation mark
|
|
||||||
badEntities.put("¬", "\u00AC"); // $NON-NLS-1$ //$NON-NLS-2$ // not sign
|
badEntities.put("¬", "\u00AC"); // $NON-NLS-1$ //$NON-NLS-2$ // not sign
|
||||||
badEntities.put("­", "\u00AD"); // $NON-NLS-1$ //$NON-NLS-2$ // soft hyphen
|
badEntities.put("­", "\u00AD"); // $NON-NLS-1$ //$NON-NLS-2$ // soft hyphen
|
||||||
badEntities.put("®", "\u00AE"); // $NON-NLS-1$ //$NON-NLS-2$ // registered sign
|
badEntities.put("®", "\u00AE"); // $NON-NLS-1$ //$NON-NLS-2$ // registered sign
|
||||||
|
@ -107,205 +106,198 @@ public class XmlCleaner {
|
||||||
badEntities.put("·", "\u00B7"); // $NON-NLS-1$ //$NON-NLS-2$ // middle dot
|
badEntities.put("·", "\u00B7"); // $NON-NLS-1$ //$NON-NLS-2$ // middle dot
|
||||||
badEntities.put("¸", "\u00B8"); // $NON-NLS-1$ //$NON-NLS-2$ // cedilla
|
badEntities.put("¸", "\u00B8"); // $NON-NLS-1$ //$NON-NLS-2$ // cedilla
|
||||||
badEntities.put("¹", "\u00B9"); // $NON-NLS-1$ //$NON-NLS-2$ // superscript one
|
badEntities.put("¹", "\u00B9"); // $NON-NLS-1$ //$NON-NLS-2$ // superscript one
|
||||||
badEntities.put(
|
badEntities.put("º", "\u00BA"); // $NON-NLS-1$ //$NON-NLS-2$ // masculine ordinal
|
||||||
"º", "\u00BA"); // $NON-NLS-1$ //$NON-NLS-2$ // masculine ordinal indicator
|
// indicator
|
||||||
badEntities.put(
|
badEntities.put("»", "\u00BB"); // $NON-NLS-2$ // right-pointing double
|
||||||
"»",
|
// angle quotation
|
||||||
"\u00BB"); //$NON-NLS-1$ //$NON-NLS-2$ // right-pointing double angle quotation mark
|
// mark
|
||||||
badEntities.put(
|
badEntities.put("¼", "\u00BC"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction one
|
||||||
"¼", "\u00BC"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction one quarter
|
// quarter
|
||||||
badEntities.put(
|
badEntities.put("½", "\u00BD"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction one
|
||||||
"½", "\u00BD"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction one half
|
// half
|
||||||
badEntities.put(
|
badEntities.put("¾", "\u00BE"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction three
|
||||||
"¾",
|
// quarters
|
||||||
"\u00BE"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction three quarters
|
badEntities.put("¿", "\u00BF"); // $NON-NLS-1$ //$NON-NLS-2$ // inverted question
|
||||||
badEntities.put(
|
// mark
|
||||||
"¿", "\u00BF"); // $NON-NLS-1$ //$NON-NLS-2$ // inverted question mark
|
badEntities.put("À", "\u00C0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
badEntities.put(
|
// A
|
||||||
"À",
|
// with grave
|
||||||
"\u00C0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with grave
|
badEntities.put("Á", "\u00C1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
badEntities.put(
|
// A
|
||||||
"Á",
|
// with acute
|
||||||
"\u00C1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with acute
|
badEntities.put("Â", "\u00C2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A
|
||||||
badEntities.put(
|
// with circumflex
|
||||||
"Â",
|
badEntities.put("Ã", "\u00C3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"\u00C2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with circumflex
|
// A
|
||||||
badEntities.put(
|
// with tilde
|
||||||
"Ã",
|
badEntities.put("Ä", "\u00C4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A
|
||||||
"\u00C3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with tilde
|
// with
|
||||||
badEntities.put(
|
// diaeresis
|
||||||
"Ä",
|
badEntities.put("Å", "\u00C5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A
|
||||||
"\u00C4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with diaeresis
|
// with ring above
|
||||||
badEntities.put(
|
badEntities.put("Æ", "\u00C6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"Å",
|
// AE
|
||||||
"\u00C5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with ring above
|
badEntities.put("Ç", "\u00C7"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
badEntities.put(
|
// C
|
||||||
"Æ", "\u00C6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter AE
|
// with cedilla
|
||||||
badEntities.put(
|
badEntities.put("È", "\u00C8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"Ç",
|
// E
|
||||||
"\u00C7"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter C with cedilla
|
// with grave
|
||||||
badEntities.put(
|
badEntities.put("É", "\u00C9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"È",
|
// E
|
||||||
"\u00C8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E with grave
|
// with acute
|
||||||
badEntities.put(
|
badEntities.put("Ê", "\u00CA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E
|
||||||
"É",
|
// with circumflex
|
||||||
"\u00C9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E with acute
|
badEntities.put("Ë", "\u00CB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E
|
||||||
badEntities.put(
|
// with
|
||||||
"Ê",
|
// diaeresis
|
||||||
"\u00CA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E with circumflex
|
badEntities.put("Ì", "\u00CC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
badEntities.put(
|
// I
|
||||||
"Ë",
|
// with grave
|
||||||
"\u00CB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E with diaeresis
|
badEntities.put("Í", "\u00CD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
badEntities.put(
|
// I
|
||||||
"Ì",
|
// with acute
|
||||||
"\u00CC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I with grave
|
badEntities.put("Î", "\u00CE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I
|
||||||
badEntities.put(
|
// with circumflex
|
||||||
"Í",
|
badEntities.put("Ï", "\u00CF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I
|
||||||
"\u00CD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I with acute
|
// with
|
||||||
badEntities.put(
|
// diaeresis
|
||||||
"Î",
|
|
||||||
"\u00CE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I with circumflex
|
|
||||||
badEntities.put(
|
|
||||||
"Ï",
|
|
||||||
"\u00CF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I with diaeresis
|
|
||||||
badEntities.put("Ð", "\u00D0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter ETH
|
badEntities.put("Ð", "\u00D0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter ETH
|
||||||
badEntities.put(
|
badEntities.put("Ñ", "\u00D1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"Ñ",
|
// N
|
||||||
"\u00D1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter N with tilde
|
// with tilde
|
||||||
badEntities.put(
|
badEntities.put("Ò", "\u00D2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"Ò",
|
// O
|
||||||
"\u00D2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with grave
|
// with grave
|
||||||
badEntities.put(
|
badEntities.put("Ó", "\u00D3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"Ó",
|
// O
|
||||||
"\u00D3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with acute
|
// with acute
|
||||||
badEntities.put(
|
badEntities.put("Ô", "\u00D4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O
|
||||||
"Ô",
|
// with circumflex
|
||||||
"\u00D4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with circumflex
|
badEntities.put("Õ", "\u00D5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
badEntities.put(
|
// O
|
||||||
"Õ",
|
// with tilde
|
||||||
"\u00D5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with tilde
|
badEntities.put("Ö", "\u00D6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O
|
||||||
badEntities.put(
|
// with
|
||||||
"Ö",
|
// diaeresis
|
||||||
"\u00D6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with diaeresis
|
|
||||||
badEntities.put("×", "\u00D7"); // $NON-NLS-1$ //$NON-NLS-2$ // multiplication sign
|
badEntities.put("×", "\u00D7"); // $NON-NLS-1$ //$NON-NLS-2$ // multiplication sign
|
||||||
badEntities.put(
|
badEntities.put("Ø", "\u00D8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"Ø",
|
// O
|
||||||
"\u00D8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with stroke
|
// with stroke
|
||||||
badEntities.put(
|
badEntities.put("Ù", "\u00D9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"Ù",
|
// U
|
||||||
"\u00D9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U with grave
|
// with grave
|
||||||
badEntities.put(
|
badEntities.put("Ú", "\u00DA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
"Ú",
|
// U
|
||||||
"\u00DA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U with acute
|
// with acute
|
||||||
badEntities.put(
|
badEntities.put("Û", "\u00DB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U
|
||||||
"Û",
|
// with circumflex
|
||||||
"\u00DB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U with circumflex
|
badEntities.put("Ü", "\u00DC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U
|
||||||
badEntities.put(
|
// with
|
||||||
"Ü",
|
// diaeresis
|
||||||
"\u00DC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U with diaeresis
|
badEntities.put("Ý", "\u00DD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
badEntities.put(
|
// Y
|
||||||
"Ý",
|
// with acute
|
||||||
"\u00DD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter Y with acute
|
badEntities.put("Þ", "\u00DE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||||
badEntities.put(
|
// THORN
|
||||||
"Þ", "\u00DE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter THORN
|
badEntities.put("ß", "\u00DF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter
|
||||||
badEntities.put(
|
// sharp s
|
||||||
"ß", "\u00DF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter sharp s
|
badEntities.put("à", "\u00E0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||||
badEntities.put(
|
// with
|
||||||
"à",
|
// grave
|
||||||
"\u00E0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with grave
|
badEntities.put("á", "\u00E1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||||
badEntities.put(
|
// with
|
||||||
"á",
|
// acute
|
||||||
"\u00E1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with acute
|
badEntities.put("â", "\u00E2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||||
badEntities.put(
|
// with
|
||||||
"â",
|
// circumflex
|
||||||
"\u00E2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with circumflex
|
badEntities.put("ã", "\u00E3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||||
badEntities.put(
|
// with
|
||||||
"ã",
|
// tilde
|
||||||
"\u00E3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with tilde
|
badEntities.put("ä", "\u00E4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||||
badEntities.put(
|
// with
|
||||||
"ä",
|
// diaeresis
|
||||||
"\u00E4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with diaeresis
|
badEntities.put("å", "\u00E5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||||
badEntities.put(
|
// with
|
||||||
"å",
|
// ring above
|
||||||
"\u00E5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with ring above
|
|
||||||
badEntities.put("æ", "\u00E6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter ae
|
badEntities.put("æ", "\u00E6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter ae
|
||||||
badEntities.put(
|
badEntities.put("ç", "\u00E7"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter c
|
||||||
"ç",
|
// with
|
||||||
"\u00E7"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter c with cedilla
|
// cedilla
|
||||||
badEntities.put(
|
badEntities.put("è", "\u00E8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e
|
||||||
"è",
|
// with
|
||||||
"\u00E8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e with grave
|
// grave
|
||||||
badEntities.put(
|
badEntities.put("é", "\u00E9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e
|
||||||
"é",
|
// with
|
||||||
"\u00E9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e with acute
|
// acute
|
||||||
badEntities.put(
|
badEntities.put("ê", "\u00EA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e
|
||||||
"ê",
|
// with
|
||||||
"\u00EA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e with circumflex
|
// circumflex
|
||||||
badEntities.put(
|
badEntities.put("ë", "\u00EB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e
|
||||||
"ë",
|
// with
|
||||||
"\u00EB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e with diaeresis
|
// diaeresis
|
||||||
badEntities.put(
|
badEntities.put("ì", "\u00EC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i
|
||||||
"ì",
|
// with
|
||||||
"\u00EC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i with grave
|
// grave
|
||||||
badEntities.put(
|
badEntities.put("í", "\u00ED"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i
|
||||||
"í",
|
// with
|
||||||
"\u00ED"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i with acute
|
// acute
|
||||||
badEntities.put(
|
badEntities.put("î", "\u00EE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i
|
||||||
"î",
|
// with
|
||||||
"\u00EE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i with circumflex
|
// circumflex
|
||||||
badEntities.put(
|
badEntities.put("ï", "\u00EF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i
|
||||||
"ï",
|
// with
|
||||||
"\u00EF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i with diaeresis
|
// diaeresis
|
||||||
badEntities.put("ð", "\u00F0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter eth
|
badEntities.put("ð", "\u00F0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter eth
|
||||||
badEntities.put(
|
badEntities.put("ñ", "\u00F1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter n
|
||||||
"ñ",
|
// with
|
||||||
"\u00F1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter n with tilde
|
// tilde
|
||||||
badEntities.put(
|
badEntities.put("ò", "\u00F2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||||
"ò",
|
// with
|
||||||
"\u00F2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with grave
|
// grave
|
||||||
badEntities.put(
|
badEntities.put("ó", "\u00F3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||||
"ó",
|
// with
|
||||||
"\u00F3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with acute
|
// acute
|
||||||
badEntities.put(
|
badEntities.put("ô", "\u00F4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||||
"ô",
|
// with
|
||||||
"\u00F4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with circumflex
|
// circumflex
|
||||||
badEntities.put(
|
badEntities.put("õ", "\u00F5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||||
"õ",
|
// with
|
||||||
"\u00F5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with tilde
|
// tilde
|
||||||
badEntities.put(
|
badEntities.put("ö", "\u00F6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||||
"ö",
|
// with
|
||||||
"\u00F6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with diaeresis
|
// diaeresis
|
||||||
badEntities.put("÷", "\u00F7"); // $NON-NLS-1$ //$NON-NLS-2$ // division sign
|
badEntities.put("÷", "\u00F7"); // $NON-NLS-1$ //$NON-NLS-2$ // division sign
|
||||||
badEntities.put(
|
badEntities.put("ø", "\u00F8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||||
"ø",
|
// with
|
||||||
"\u00F8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with stroke
|
// stroke
|
||||||
badEntities.put(
|
badEntities.put("ù", "\u00F9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u
|
||||||
"ù",
|
// with
|
||||||
"\u00F9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u with grave
|
// grave
|
||||||
badEntities.put(
|
badEntities.put("ú", "\u00FA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u
|
||||||
"ú",
|
// with
|
||||||
"\u00FA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u with acute
|
// acute
|
||||||
badEntities.put(
|
badEntities.put("û", "\u00FB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u
|
||||||
"û",
|
// with
|
||||||
"\u00FB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u with circumflex
|
// circumflex
|
||||||
badEntities.put(
|
badEntities.put("ü", "\u00FC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u
|
||||||
"ü",
|
// with
|
||||||
"\u00FC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u with diaeresis
|
// diaeresis
|
||||||
badEntities.put(
|
badEntities.put("ý", "\u00FD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter y
|
||||||
"ý",
|
// with
|
||||||
"\u00FD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter y with acute
|
// acute
|
||||||
badEntities.put(
|
badEntities.put("þ", "\u00FE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter
|
||||||
"þ", "\u00FE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter thorn
|
// thorn
|
||||||
badEntities.put(
|
badEntities.put("ÿ", "\u00FF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter y
|
||||||
"ÿ",
|
// with
|
||||||
"\u00FF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter y with diaeresis
|
// diaeresis
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* For each entity in the input that is not allowed in XML, replace the entity with its unicode
|
* For each entity in the input that is not allowed in XML, replace the entity with its unicode
|
||||||
* equivalent or remove it. For each instance of a bare {@literal &}, replace it with {@literal
|
* equivalent or remove it. For each instance of a bare {@literal &}, replace it with {@literal
|
||||||
* &<br/> } XML only allows 4 entities: {@literal &amp;}, {@literal &quot;},
|
* &<br/> } XML only allows 4 entities: {@literal &amp;}, {@literal &quot;}, {@literal
|
||||||
* {@literal &lt;} and {@literal &gt;}.
|
* &lt;} and {@literal &gt;}.
|
||||||
*
|
*
|
||||||
* @param broken the string to handle entities
|
* @param broken the string to handle entities
|
||||||
* @return the string with entities appropriately fixed up
|
* @return the string with entities appropriately fixed up
|
||||||
|
@ -335,9 +327,7 @@ public class XmlCleaner {
|
||||||
while (true) {
|
while (true) {
|
||||||
// if we are at the end of the string then just escape the '&';
|
// if we are at the end of the string then just escape the '&';
|
||||||
if (i >= working.length()) {
|
if (i >= working.length()) {
|
||||||
return working.substring(0, amp)
|
return working.substring(0, amp) + "&" + working.substring(amp + 1); // $NON-NLS-1$
|
||||||
+ "&"
|
|
||||||
+ working.substring(amp + 1); // $NON-NLS-1$
|
|
||||||
}
|
}
|
||||||
// if we have come to a ; then we have an entity
|
// if we have come to a ; then we have an entity
|
||||||
// If it is something that xml can't handle then replace it.
|
// If it is something that xml can't handle then replace it.
|
||||||
|
@ -351,10 +341,7 @@ public class XmlCleaner {
|
||||||
// Did we end an entity without finding a closing ;
|
// Did we end an entity without finding a closing ;
|
||||||
// Then treat it as an '&' that needs to be replaced with &
|
// Then treat it as an '&' that needs to be replaced with &
|
||||||
if (!Character.isLetterOrDigit(c)) {
|
if (!Character.isLetterOrDigit(c)) {
|
||||||
working =
|
working = working.substring(0, amp) + "&" + working.substring(amp + 1); // $NON-NLS-1$
|
||||||
working.substring(0, amp)
|
|
||||||
+ "&"
|
|
||||||
+ working.substring(amp + 1); // $NON-NLS-1$
|
|
||||||
amp = i + 4; // account for the 4 extra characters
|
amp = i + 4; // account for the 4 extra characters
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -375,8 +362,8 @@ public class XmlCleaner {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Replace entity with its unicode equivalent, if it is not a valid XML entity. Otherwise strip
|
* Replace entity with its unicode equivalent, if it is not a valid XML entity. Otherwise strip it
|
||||||
* it out. XML only allows 4 entities: &amp;, &quot;, &lt; and &gt;.
|
* out. XML only allows 4 entities: &amp;, &quot;, &lt; and &gt;.
|
||||||
*
|
*
|
||||||
* @param entity the entity to be replaced
|
* @param entity the entity to be replaced
|
||||||
* @return the substitution for the entity, either itself, the unicode equivalent or an empty
|
* @return the substitution for the entity, either itself, the unicode equivalent or an empty
|
||||||
|
|
|
@ -45,15 +45,11 @@ public class TransformFunction implements MapFunction<MetadataRecord, MetadataRe
|
||||||
processor.registerExtensionFunction(cleanFunction);
|
processor.registerExtensionFunction(cleanFunction);
|
||||||
final XsltCompiler comp = processor.newXsltCompiler();
|
final XsltCompiler comp = processor.newXsltCompiler();
|
||||||
XsltExecutable xslt =
|
XsltExecutable xslt =
|
||||||
comp.compile(
|
comp.compile(new StreamSource(new ByteArrayInputStream(transformationRule.getBytes())));
|
||||||
new StreamSource(
|
|
||||||
new ByteArrayInputStream(transformationRule.getBytes())));
|
|
||||||
XdmNode source =
|
XdmNode source =
|
||||||
processor
|
processor
|
||||||
.newDocumentBuilder()
|
.newDocumentBuilder()
|
||||||
.build(
|
.build(new StreamSource(new ByteArrayInputStream(value.getBody().getBytes())));
|
||||||
new StreamSource(
|
|
||||||
new ByteArrayInputStream(value.getBody().getBytes())));
|
|
||||||
XsltTransformer trans = xslt.load();
|
XsltTransformer trans = xslt.load();
|
||||||
trans.setInitialContextNode(source);
|
trans.setInitialContextNode(source);
|
||||||
final StringWriter output = new StringWriter();
|
final StringWriter output = new StringWriter();
|
||||||
|
|
|
@ -41,8 +41,7 @@ public class TransformSparkJobNode {
|
||||||
final String workflowId = parser.get("workflowId");
|
final String workflowId = parser.get("workflowId");
|
||||||
final String trasformationRule =
|
final String trasformationRule =
|
||||||
extractXSLTFromTR(
|
extractXSLTFromTR(
|
||||||
Objects.requireNonNull(
|
Objects.requireNonNull(DHPUtils.decompressString(parser.get("transformationRule"))));
|
||||||
DHPUtils.decompressString(parser.get("transformationRule"))));
|
|
||||||
final String master = parser.get("master");
|
final String master = parser.get("master");
|
||||||
final String rabbitUser = parser.get("rabbitUser");
|
final String rabbitUser = parser.get("rabbitUser");
|
||||||
final String rabbitPassword = parser.get("rabbitPassword");
|
final String rabbitPassword = parser.get("rabbitPassword");
|
||||||
|
@ -53,10 +52,7 @@ public class TransformSparkJobNode {
|
||||||
parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));
|
parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));
|
||||||
|
|
||||||
final SparkSession spark =
|
final SparkSession spark =
|
||||||
SparkSession.builder()
|
SparkSession.builder().appName("TransformStoreSparkJob").master(master).getOrCreate();
|
||||||
.appName("TransformStoreSparkJob")
|
|
||||||
.master(master)
|
|
||||||
.getOrCreate();
|
|
||||||
|
|
||||||
final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
|
final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
|
||||||
final Dataset<MetadataRecord> mdstoreInput =
|
final Dataset<MetadataRecord> mdstoreInput =
|
||||||
|
@ -85,8 +81,7 @@ public class TransformSparkJobNode {
|
||||||
System.out.println(new Message(workflowId, "Transform", MessageType.REPORT, reportMap));
|
System.out.println(new Message(workflowId, "Transform", MessageType.REPORT, reportMap));
|
||||||
if (!test) {
|
if (!test) {
|
||||||
final MessageManager manager =
|
final MessageManager manager =
|
||||||
new MessageManager(
|
new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false, false, null);
|
||||||
rabbitHost, rabbitUser, rabbitPassword, false, false, null);
|
|
||||||
manager.sendMessage(
|
manager.sendMessage(
|
||||||
new Message(workflowId, "Transform", MessageType.REPORT, reportMap),
|
new Message(workflowId, "Transform", MessageType.REPORT, reportMap),
|
||||||
rabbitReportQueue,
|
rabbitReportQueue,
|
||||||
|
|
|
@ -33,24 +33,34 @@ public class CollectionJobTest {
|
||||||
final Provenance provenance = new Provenance("pippo", "puppa", "ns_prefix");
|
final Provenance provenance = new Provenance("pippo", "puppa", "ns_prefix");
|
||||||
GenerateNativeStoreSparkJob.main(
|
GenerateNativeStoreSparkJob.main(
|
||||||
new String[] {
|
new String[] {
|
||||||
"-mt", "local",
|
"-mt",
|
||||||
"-w", "wid",
|
"local",
|
||||||
"-e", "XML",
|
"-w",
|
||||||
"-d", "" + System.currentTimeMillis(),
|
"wid",
|
||||||
"-p", new ObjectMapper().writeValueAsString(provenance),
|
"-e",
|
||||||
|
"XML",
|
||||||
|
"-d",
|
||||||
|
"" + System.currentTimeMillis(),
|
||||||
|
"-p",
|
||||||
|
new ObjectMapper().writeValueAsString(provenance),
|
||||||
"-x",
|
"-x",
|
||||||
"./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
|
"./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
|
||||||
"-i",
|
"-i",
|
||||||
this.getClass()
|
this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(),
|
||||||
.getResource("/eu/dnetlib/dhp/collection/native.seq")
|
"-o",
|
||||||
.toString(),
|
testDir.toString() + "/store",
|
||||||
"-o", testDir.toString() + "/store",
|
"-t",
|
||||||
"-t", "true",
|
"true",
|
||||||
"-ru", "",
|
"-ru",
|
||||||
"-rp", "",
|
"",
|
||||||
"-rh", "",
|
"-rp",
|
||||||
"-ro", "",
|
"",
|
||||||
"-rr", ""
|
"-rh",
|
||||||
|
"",
|
||||||
|
"-ro",
|
||||||
|
"",
|
||||||
|
"-rr",
|
||||||
|
""
|
||||||
});
|
});
|
||||||
System.out.println(new ObjectMapper().writeValueAsString(provenance));
|
System.out.println(new ObjectMapper().writeValueAsString(provenance));
|
||||||
}
|
}
|
||||||
|
|
|
@ -33,8 +33,7 @@ public class DnetCollectorWorkerApplicationTests {
|
||||||
when(argumentParser.get("workflowId")).thenReturn("sandro");
|
when(argumentParser.get("workflowId")).thenReturn("sandro");
|
||||||
when(argumentParser.get("rabbitOngoingQueue")).thenReturn("sandro");
|
when(argumentParser.get("rabbitOngoingQueue")).thenReturn("sandro");
|
||||||
|
|
||||||
when(messageManager.sendMessage(
|
when(messageManager.sendMessage(any(Message.class), anyString(), anyBoolean(), anyBoolean()))
|
||||||
any(Message.class), anyString(), anyBoolean(), anyBoolean()))
|
|
||||||
.thenAnswer(
|
.thenAnswer(
|
||||||
a -> {
|
a -> {
|
||||||
System.out.println("sent message: " + a.getArguments()[0]);
|
System.out.println("sent message: " + a.getArguments()[0]);
|
||||||
|
@ -46,9 +45,7 @@ public class DnetCollectorWorkerApplicationTests {
|
||||||
System.out.println("Called");
|
System.out.println("Called");
|
||||||
return true;
|
return true;
|
||||||
});
|
});
|
||||||
worker =
|
worker = new DnetCollectorWorker(new CollectorPluginFactory(), argumentParser, messageManager);
|
||||||
new DnetCollectorWorker(
|
|
||||||
new CollectorPluginFactory(), argumentParser, messageManager);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@AfterEach
|
@AfterEach
|
||||||
|
|
|
@ -43,16 +43,12 @@ public class TransformationJobTest {
|
||||||
XsltExecutable exp =
|
XsltExecutable exp =
|
||||||
comp.compile(
|
comp.compile(
|
||||||
new StreamSource(
|
new StreamSource(
|
||||||
this.getClass()
|
this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/ext_simple.xsl")));
|
||||||
.getResourceAsStream(
|
|
||||||
"/eu/dnetlib/dhp/transform/ext_simple.xsl")));
|
|
||||||
XdmNode source =
|
XdmNode source =
|
||||||
proc.newDocumentBuilder()
|
proc.newDocumentBuilder()
|
||||||
.build(
|
.build(
|
||||||
new StreamSource(
|
new StreamSource(
|
||||||
this.getClass()
|
this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input.xml")));
|
||||||
.getResourceAsStream(
|
|
||||||
"/eu/dnetlib/dhp/transform/input.xml")));
|
|
||||||
XsltTransformer trans = exp.load();
|
XsltTransformer trans = exp.load();
|
||||||
trans.setInitialContextNode(source);
|
trans.setInitialContextNode(source);
|
||||||
final StringWriter output = new StringWriter();
|
final StringWriter output = new StringWriter();
|
||||||
|
@ -73,22 +69,33 @@ public class TransformationJobTest {
|
||||||
final String xslt =
|
final String xslt =
|
||||||
DHPUtils.compressString(
|
DHPUtils.compressString(
|
||||||
IOUtils.toString(
|
IOUtils.toString(
|
||||||
this.getClass()
|
this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml")));
|
||||||
.getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml")));
|
|
||||||
TransformSparkJobNode.main(
|
TransformSparkJobNode.main(
|
||||||
new String[] {
|
new String[] {
|
||||||
"-mt", "local",
|
"-mt",
|
||||||
"-i", mdstore_input,
|
"local",
|
||||||
"-o", mdstore_output,
|
"-i",
|
||||||
"-d", "1",
|
mdstore_input,
|
||||||
"-w", "1",
|
"-o",
|
||||||
"-tr", xslt,
|
mdstore_output,
|
||||||
"-t", "true",
|
"-d",
|
||||||
"-ru", "",
|
"1",
|
||||||
"-rp", "",
|
"-w",
|
||||||
"-rh", "",
|
"1",
|
||||||
"-ro", "",
|
"-tr",
|
||||||
"-rr", ""
|
xslt,
|
||||||
|
"-t",
|
||||||
|
"true",
|
||||||
|
"-ru",
|
||||||
|
"",
|
||||||
|
"-rp",
|
||||||
|
"",
|
||||||
|
"-rh",
|
||||||
|
"",
|
||||||
|
"-ro",
|
||||||
|
"",
|
||||||
|
"-rr",
|
||||||
|
""
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -109,8 +116,7 @@ public class TransformationJobTest {
|
||||||
public void testTransformFunction() throws Exception {
|
public void testTransformFunction() throws Exception {
|
||||||
SAXReader reader = new SAXReader();
|
SAXReader reader = new SAXReader();
|
||||||
Document document =
|
Document document =
|
||||||
reader.read(
|
reader.read(this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml"));
|
||||||
this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml"));
|
|
||||||
Node node = document.selectSingleNode("//CODE/*[local-name()='stylesheet']");
|
Node node = document.selectSingleNode("//CODE/*[local-name()='stylesheet']");
|
||||||
final String xslt = node.asXML();
|
final String xslt = node.asXML();
|
||||||
Map<String, Vocabulary> vocabularies = new HashMap<>();
|
Map<String, Vocabulary> vocabularies = new HashMap<>();
|
||||||
|
@ -122,8 +128,7 @@ public class TransformationJobTest {
|
||||||
MetadataRecord record = new MetadataRecord();
|
MetadataRecord record = new MetadataRecord();
|
||||||
record.setBody(
|
record.setBody(
|
||||||
IOUtils.toString(
|
IOUtils.toString(
|
||||||
this.getClass()
|
this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input.xml")));
|
||||||
.getResourceAsStream("/eu/dnetlib/dhp/transform/input.xml")));
|
|
||||||
|
|
||||||
final MetadataRecord result = tf.call(record);
|
final MetadataRecord result = tf.call(record);
|
||||||
assertNotNull(result.getBody());
|
assertNotNull(result.getBody());
|
||||||
|
@ -135,13 +140,11 @@ public class TransformationJobTest {
|
||||||
public void extractTr() throws Exception {
|
public void extractTr() throws Exception {
|
||||||
|
|
||||||
final String xmlTr =
|
final String xmlTr =
|
||||||
IOUtils.toString(
|
IOUtils.toString(this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml"));
|
||||||
this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml"));
|
|
||||||
|
|
||||||
SAXReader reader = new SAXReader();
|
SAXReader reader = new SAXReader();
|
||||||
Document document =
|
Document document =
|
||||||
reader.read(
|
reader.read(this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml"));
|
||||||
this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml"));
|
|
||||||
Node node = document.selectSingleNode("//CODE/*[local-name()='stylesheet']");
|
Node node = document.selectSingleNode("//CODE/*[local-name()='stylesheet']");
|
||||||
|
|
||||||
System.out.println(node.asXML());
|
System.out.println(node.asXML());
|
||||||
|
|
|
@ -40,8 +40,7 @@ abstract class AbstractSparkAction implements Serializable {
|
||||||
throws ISLookUpException, DocumentException, IOException {
|
throws ISLookUpException, DocumentException, IOException {
|
||||||
|
|
||||||
final String xquery =
|
final String xquery =
|
||||||
String.format(
|
String.format("/RESOURCE_PROFILE[.//DEDUPLICATION/ACTION_SET/@id = '%s']", orchestrator);
|
||||||
"/RESOURCE_PROFILE[.//DEDUPLICATION/ACTION_SET/@id = '%s']", orchestrator);
|
|
||||||
|
|
||||||
String orchestratorProfile = isLookUpService.getResourceProfileByQuery(xquery);
|
String orchestratorProfile = isLookUpService.getResourceProfileByQuery(xquery);
|
||||||
|
|
||||||
|
|
|
@ -42,11 +42,7 @@ public class DatePicker {
|
||||||
.filter(d -> inRange(d.getKey()))
|
.filter(d -> inRange(d.getKey()))
|
||||||
.sorted(reverseOrder(comparingByValue()))
|
.sorted(reverseOrder(comparingByValue()))
|
||||||
.collect(
|
.collect(
|
||||||
toMap(
|
toMap(Map.Entry::getKey, Map.Entry::getValue, (e1, e2) -> e2, LinkedHashMap::new));
|
||||||
Map.Entry::getKey,
|
|
||||||
Map.Entry::getValue,
|
|
||||||
(e1, e2) -> e2,
|
|
||||||
LinkedHashMap::new));
|
|
||||||
|
|
||||||
// shortcut
|
// shortcut
|
||||||
if (sorted.size() == 0) {
|
if (sorted.size() == 0) {
|
||||||
|
@ -67,11 +63,7 @@ public class DatePicker {
|
||||||
final int max = sorted.values().iterator().next();
|
final int max = sorted.values().iterator().next();
|
||||||
Optional<String> first =
|
Optional<String> first =
|
||||||
sorted.entrySet().stream()
|
sorted.entrySet().stream()
|
||||||
.filter(
|
.filter(e -> e.getValue() == max && !endsWith(e.getKey(), DATE_DEFAULT_SUFFIX))
|
||||||
e ->
|
|
||||||
e.getValue() == max
|
|
||||||
&& !endsWith(
|
|
||||||
e.getKey(), DATE_DEFAULT_SUFFIX))
|
|
||||||
.map(Map.Entry::getKey)
|
.map(Map.Entry::getKey)
|
||||||
.findFirst();
|
.findFirst();
|
||||||
if (first.isPresent()) {
|
if (first.isPresent()) {
|
||||||
|
@ -88,9 +80,7 @@ public class DatePicker {
|
||||||
return date;
|
return date;
|
||||||
} else {
|
} else {
|
||||||
final Optional<String> first =
|
final Optional<String> first =
|
||||||
accepted.stream()
|
accepted.stream().filter(d -> !endsWith(d, DATE_DEFAULT_SUFFIX)).findFirst();
|
||||||
.filter(d -> !endsWith(d, DATE_DEFAULT_SUFFIX))
|
|
||||||
.findFirst();
|
|
||||||
if (first.isPresent()) {
|
if (first.isPresent()) {
|
||||||
date.setValue(first.get());
|
date.setValue(first.get());
|
||||||
return date;
|
return date;
|
||||||
|
|
|
@ -34,7 +34,8 @@ public class DedupRecordFactory {
|
||||||
|
|
||||||
// <id, json_entity>
|
// <id, json_entity>
|
||||||
Dataset<Tuple2<String, T>> entities =
|
Dataset<Tuple2<String, T>> entities =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.textFile(entitiesInputPath)
|
.textFile(entitiesInputPath)
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<String, Tuple2<String, T>>)
|
(MapFunction<String, Tuple2<String, T>>)
|
||||||
|
@ -46,7 +47,8 @@ public class DedupRecordFactory {
|
||||||
|
|
||||||
// <source, target>: source is the dedup_id, target is the id of the mergedIn
|
// <source, target>: source is the dedup_id, target is the id of the mergedIn
|
||||||
Dataset<Tuple2<String, String>> mergeRels =
|
Dataset<Tuple2<String, String>> mergeRels =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(mergeRelsInputPath)
|
.load(mergeRelsInputPath)
|
||||||
.as(Encoders.bean(Relation.class))
|
.as(Encoders.bean(Relation.class))
|
||||||
.where("relClass == 'merges'")
|
.where("relClass == 'merges'")
|
||||||
|
@ -58,14 +60,11 @@ public class DedupRecordFactory {
|
||||||
return mergeRels
|
return mergeRels
|
||||||
.joinWith(entities, mergeRels.col("_2").equalTo(entities.col("_1")), "inner")
|
.joinWith(entities, mergeRels.col("_2").equalTo(entities.col("_1")), "inner")
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<
|
(MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, T>>, Tuple2<String, T>>)
|
||||||
Tuple2<Tuple2<String, String>, Tuple2<String, T>>,
|
|
||||||
Tuple2<String, T>>)
|
|
||||||
value -> new Tuple2<>(value._1()._1(), value._2()._2()),
|
value -> new Tuple2<>(value._1()._1(), value._2()._2()),
|
||||||
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)))
|
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)))
|
||||||
.groupByKey(
|
.groupByKey(
|
||||||
(MapFunction<Tuple2<String, T>, String>) entity -> entity._1(),
|
(MapFunction<Tuple2<String, T>, String>) entity -> entity._1(), Encoders.STRING())
|
||||||
Encoders.STRING())
|
|
||||||
.mapGroups(
|
.mapGroups(
|
||||||
(MapGroupsFunction<String, Tuple2<String, T>, T>)
|
(MapGroupsFunction<String, Tuple2<String, T>, T>)
|
||||||
(key, values) -> entityMerger(key, values, ts, dataInfo),
|
(key, values) -> entityMerger(key, values, ts, dataInfo),
|
||||||
|
|
|
@ -35,14 +35,12 @@ public class DedupUtility {
|
||||||
Map<String, LongAccumulator> accumulators = new HashMap<>();
|
Map<String, LongAccumulator> accumulators = new HashMap<>();
|
||||||
|
|
||||||
String acc1 =
|
String acc1 =
|
||||||
String.format(
|
String.format("%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1");
|
||||||
"%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1");
|
|
||||||
accumulators.put(acc1, context.longAccumulator(acc1));
|
accumulators.put(acc1, context.longAccumulator(acc1));
|
||||||
String acc2 =
|
String acc2 =
|
||||||
String.format(
|
String.format(
|
||||||
"%s::%s",
|
"%s::%s",
|
||||||
dedupConf.getWf().getEntityType(),
|
dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField());
|
||||||
"missing " + dedupConf.getWf().getOrderField());
|
|
||||||
accumulators.put(acc2, context.longAccumulator(acc2));
|
accumulators.put(acc2, context.longAccumulator(acc2));
|
||||||
String acc3 =
|
String acc3 =
|
||||||
String.format(
|
String.format(
|
||||||
|
@ -50,8 +48,7 @@ public class DedupUtility {
|
||||||
dedupConf.getWf().getEntityType(),
|
dedupConf.getWf().getEntityType(),
|
||||||
String.format(
|
String.format(
|
||||||
"Skipped records for count(%s) >= %s",
|
"Skipped records for count(%s) >= %s",
|
||||||
dedupConf.getWf().getOrderField(),
|
dedupConf.getWf().getOrderField(), dedupConf.getWf().getGroupMaxSize()));
|
||||||
dedupConf.getWf().getGroupMaxSize()));
|
|
||||||
accumulators.put(acc3, context.longAccumulator(acc3));
|
accumulators.put(acc3, context.longAccumulator(acc3));
|
||||||
String acc4 = String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list");
|
String acc4 = String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list");
|
||||||
accumulators.put(acc4, context.longAccumulator(acc4));
|
accumulators.put(acc4, context.longAccumulator(acc4));
|
||||||
|
@ -60,9 +57,7 @@ public class DedupUtility {
|
||||||
accumulators.put(acc5, context.longAccumulator(acc5));
|
accumulators.put(acc5, context.longAccumulator(acc5));
|
||||||
String acc6 =
|
String acc6 =
|
||||||
String.format(
|
String.format(
|
||||||
"%s::%s",
|
"%s::%s", dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold());
|
||||||
dedupConf.getWf().getEntityType(),
|
|
||||||
"d < " + dedupConf.getWf().getThreshold());
|
|
||||||
accumulators.put(acc6, context.longAccumulator(acc6));
|
accumulators.put(acc6, context.longAccumulator(acc6));
|
||||||
|
|
||||||
return accumulators;
|
return accumulators;
|
||||||
|
@ -106,10 +101,7 @@ public class DedupUtility {
|
||||||
final Map<String, Author> basePidAuthorMap =
|
final Map<String, Author> basePidAuthorMap =
|
||||||
base.stream()
|
base.stream()
|
||||||
.filter(a -> a.getPid() != null && a.getPid().size() > 0)
|
.filter(a -> a.getPid() != null && a.getPid().size() > 0)
|
||||||
.flatMap(
|
.flatMap(a -> a.getPid().stream().map(p -> new Tuple2<>(p.toComparableString(), a)))
|
||||||
a ->
|
|
||||||
a.getPid().stream()
|
|
||||||
.map(p -> new Tuple2<>(p.toComparableString(), a)))
|
|
||||||
.collect(Collectors.toMap(Tuple2::_1, Tuple2::_2, (x1, x2) -> x1));
|
.collect(Collectors.toMap(Tuple2::_1, Tuple2::_2, (x1, x2) -> x1));
|
||||||
|
|
||||||
final List<Tuple2<StructuredProperty, Author>> pidToEnrich =
|
final List<Tuple2<StructuredProperty, Author>> pidToEnrich =
|
||||||
|
@ -118,10 +110,7 @@ public class DedupUtility {
|
||||||
.flatMap(
|
.flatMap(
|
||||||
a ->
|
a ->
|
||||||
a.getPid().stream()
|
a.getPid().stream()
|
||||||
.filter(
|
.filter(p -> !basePidAuthorMap.containsKey(p.toComparableString()))
|
||||||
p ->
|
|
||||||
!basePidAuthorMap.containsKey(
|
|
||||||
p.toComparableString()))
|
|
||||||
.map(p -> new Tuple2<>(p, a)))
|
.map(p -> new Tuple2<>(p, a)))
|
||||||
.collect(Collectors.toList());
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
@ -167,14 +156,13 @@ public class DedupUtility {
|
||||||
.score(normalize(pa.getSurnameString()), normalize(pb.getSurnameString()));
|
.score(normalize(pa.getSurnameString()), normalize(pb.getSurnameString()));
|
||||||
} else {
|
} else {
|
||||||
return new JaroWinkler()
|
return new JaroWinkler()
|
||||||
.score(
|
.score(normalize(pa.getNormalisedFullname()), normalize(pb.getNormalisedFullname()));
|
||||||
normalize(pa.getNormalisedFullname()),
|
|
||||||
normalize(pb.getNormalisedFullname()));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static String normalize(final String s) {
|
private static String normalize(final String s) {
|
||||||
return nfd(s).toLowerCase()
|
return nfd(s)
|
||||||
|
.toLowerCase()
|
||||||
// do not compact the regexes in a single expression, would cause StackOverflowError
|
// do not compact the regexes in a single expression, would cause StackOverflowError
|
||||||
// in case
|
// in case
|
||||||
// of large input strings
|
// of large input strings
|
||||||
|
@ -219,8 +207,7 @@ public class DedupUtility {
|
||||||
final ISLookUpService isLookUpService = ISLookupClientFactory.getLookUpService(isLookUpUrl);
|
final ISLookUpService isLookUpService = ISLookupClientFactory.getLookUpService(isLookUpUrl);
|
||||||
|
|
||||||
final String xquery =
|
final String xquery =
|
||||||
String.format(
|
String.format("/RESOURCE_PROFILE[.//DEDUPLICATION/ACTION_SET/@id = '%s']", orchestrator);
|
||||||
"/RESOURCE_PROFILE[.//DEDUPLICATION/ACTION_SET/@id = '%s']", orchestrator);
|
|
||||||
|
|
||||||
String orchestratorProfile = isLookUpService.getResourceProfileByQuery(xquery);
|
String orchestratorProfile = isLookUpService.getResourceProfileByQuery(xquery);
|
||||||
|
|
||||||
|
|
|
@ -19,7 +19,8 @@ public class Deduper implements Serializable {
|
||||||
Map<String, LongAccumulator> accumulators =
|
Map<String, LongAccumulator> accumulators =
|
||||||
DedupUtility.constructAccumulator(config, context.sc());
|
DedupUtility.constructAccumulator(config, context.sc());
|
||||||
|
|
||||||
return blocks.flatMapToPair(
|
return blocks
|
||||||
|
.flatMapToPair(
|
||||||
it -> {
|
it -> {
|
||||||
final SparkReporter reporter = new SparkReporter(accumulators);
|
final SparkReporter reporter = new SparkReporter(accumulators);
|
||||||
new BlockProcessor(config)
|
new BlockProcessor(config)
|
||||||
|
|
|
@ -73,8 +73,7 @@ public class SparkCreateDedupRecord extends AbstractSparkAction {
|
||||||
DedupUtility.createMergeRelPath(workingPath, actionSetId, subEntity);
|
DedupUtility.createMergeRelPath(workingPath, actionSetId, subEntity);
|
||||||
final String entityPath = DedupUtility.createEntityPath(graphBasePath, subEntity);
|
final String entityPath = DedupUtility.createEntityPath(graphBasePath, subEntity);
|
||||||
|
|
||||||
final Class<OafEntity> clazz =
|
final Class<OafEntity> clazz = ModelSupport.entityTypes.get(EntityType.valueOf(subEntity));
|
||||||
ModelSupport.entityTypes.get(EntityType.valueOf(subEntity));
|
|
||||||
final DataInfo dataInfo = getDataInfo(dedupConf);
|
final DataInfo dataInfo = getDataInfo(dedupConf);
|
||||||
DedupRecordFactory.createDedupRecord(spark, dataInfo, mergeRelPath, entityPath, clazz)
|
DedupRecordFactory.createDedupRecord(spark, dataInfo, mergeRelPath, entityPath, clazz)
|
||||||
.write()
|
.write()
|
||||||
|
|
|
@ -91,27 +91,16 @@ public class SparkCreateMergeRels extends AbstractSparkAction {
|
||||||
|
|
||||||
final JavaPairRDD<Object, String> vertexes =
|
final JavaPairRDD<Object, String> vertexes =
|
||||||
sc.textFile(graphBasePath + "/" + subEntity)
|
sc.textFile(graphBasePath + "/" + subEntity)
|
||||||
.map(
|
.map(s -> MapDocumentUtil.getJPathString(dedupConf.getWf().getIdPath(), s))
|
||||||
s ->
|
.mapToPair((PairFunction<String, Object, String>) s -> new Tuple2<>(hash(s), s));
|
||||||
MapDocumentUtil.getJPathString(
|
|
||||||
dedupConf.getWf().getIdPath(), s))
|
|
||||||
.mapToPair(
|
|
||||||
(PairFunction<String, Object, String>)
|
|
||||||
s -> new Tuple2<>(hash(s), s));
|
|
||||||
|
|
||||||
final RDD<Edge<String>> edgeRdd =
|
final RDD<Edge<String>> edgeRdd =
|
||||||
spark.read()
|
spark
|
||||||
.load(
|
.read()
|
||||||
DedupUtility.createSimRelPath(
|
.load(DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity))
|
||||||
workingPath, actionSetId, subEntity))
|
|
||||||
.as(Encoders.bean(Relation.class))
|
.as(Encoders.bean(Relation.class))
|
||||||
.javaRDD()
|
.javaRDD()
|
||||||
.map(
|
.map(it -> new Edge<>(hash(it.getSource()), hash(it.getTarget()), it.getRelClass()))
|
||||||
it ->
|
|
||||||
new Edge<>(
|
|
||||||
hash(it.getSource()),
|
|
||||||
hash(it.getTarget()),
|
|
||||||
it.getRelClass()))
|
|
||||||
.rdd();
|
.rdd();
|
||||||
|
|
||||||
final Dataset<Relation> mergeRels =
|
final Dataset<Relation> mergeRels =
|
||||||
|
|
|
@ -46,9 +46,7 @@ public class SparkCreateSimRels extends AbstractSparkAction {
|
||||||
SparkConf conf = new SparkConf();
|
SparkConf conf = new SparkConf();
|
||||||
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
|
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
|
||||||
conf.registerKryoClasses(
|
conf.registerKryoClasses(
|
||||||
new Class[] {
|
new Class[] {MapDocument.class, FieldListImpl.class, FieldValueImpl.class, Block.class});
|
||||||
MapDocument.class, FieldListImpl.class, FieldValueImpl.class, Block.class
|
|
||||||
});
|
|
||||||
|
|
||||||
new SparkCreateSimRels(parser, getSparkSession(conf))
|
new SparkCreateSimRels(parser, getSparkSession(conf))
|
||||||
.run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
|
.run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
|
||||||
|
@ -76,8 +74,7 @@ public class SparkCreateSimRels extends AbstractSparkAction {
|
||||||
final String subEntity = dedupConf.getWf().getSubEntityValue();
|
final String subEntity = dedupConf.getWf().getSubEntityValue();
|
||||||
log.info("Creating simrels for: '{}'", subEntity);
|
log.info("Creating simrels for: '{}'", subEntity);
|
||||||
|
|
||||||
final String outputPath =
|
final String outputPath = DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity);
|
||||||
DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity);
|
|
||||||
removeOutputDir(spark, outputPath);
|
removeOutputDir(spark, outputPath);
|
||||||
|
|
||||||
JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
||||||
|
@ -87,9 +84,7 @@ public class SparkCreateSimRels extends AbstractSparkAction {
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
(PairFunction<String, String, MapDocument>)
|
(PairFunction<String, String, MapDocument>)
|
||||||
s -> {
|
s -> {
|
||||||
MapDocument d =
|
MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
|
||||||
MapDocumentUtil.asMapDocumentWithJPath(
|
|
||||||
dedupConf, s);
|
|
||||||
return new Tuple2<>(d.getIdentifier(), d);
|
return new Tuple2<>(d.getIdentifier(), d);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -102,7 +97,8 @@ public class SparkCreateSimRels extends AbstractSparkAction {
|
||||||
.map(t -> createSimRel(t._1(), t._2(), entity));
|
.map(t -> createSimRel(t._1(), t._2(), entity));
|
||||||
|
|
||||||
// save the simrel in the workingdir
|
// save the simrel in the workingdir
|
||||||
spark.createDataset(relations.rdd(), Encoders.bean(Relation.class))
|
spark
|
||||||
|
.createDataset(relations.rdd(), Encoders.bean(Relation.class))
|
||||||
.write()
|
.write()
|
||||||
.mode(SaveMode.Append)
|
.mode(SaveMode.Append)
|
||||||
.save(outputPath);
|
.save(outputPath);
|
||||||
|
|
|
@ -62,7 +62,8 @@ public class SparkPropagateRelation extends AbstractSparkAction {
|
||||||
removeOutputDir(spark, outputRelationPath);
|
removeOutputDir(spark, outputRelationPath);
|
||||||
|
|
||||||
Dataset<Relation> mergeRels =
|
Dataset<Relation> mergeRels =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(DedupUtility.createMergeRelPath(workingPath, "*", "*"))
|
.load(DedupUtility.createMergeRelPath(workingPath, "*", "*"))
|
||||||
.as(Encoders.bean(Relation.class));
|
.as(Encoders.bean(Relation.class));
|
||||||
|
|
||||||
|
@ -80,17 +81,11 @@ public class SparkPropagateRelation extends AbstractSparkAction {
|
||||||
final String relationPath = DedupUtility.createEntityPath(graphBasePath, "relation");
|
final String relationPath = DedupUtility.createEntityPath(graphBasePath, "relation");
|
||||||
|
|
||||||
Dataset<Relation> rels =
|
Dataset<Relation> rels =
|
||||||
spark.read()
|
spark.read().textFile(relationPath).map(patchRelFn(), Encoders.bean(Relation.class));
|
||||||
.textFile(relationPath)
|
|
||||||
.map(patchRelFn(), Encoders.bean(Relation.class));
|
|
||||||
|
|
||||||
Dataset<Relation> newRels =
|
Dataset<Relation> newRels =
|
||||||
processDataset(
|
processDataset(
|
||||||
processDataset(
|
processDataset(rels, mergedIds, FieldType.SOURCE, getFixRelFn(FieldType.SOURCE)),
|
||||||
rels,
|
|
||||||
mergedIds,
|
|
||||||
FieldType.SOURCE,
|
|
||||||
getFixRelFn(FieldType.SOURCE)),
|
|
||||||
mergedIds,
|
mergedIds,
|
||||||
FieldType.TARGET,
|
FieldType.TARGET,
|
||||||
getFixRelFn(FieldType.TARGET))
|
getFixRelFn(FieldType.TARGET))
|
||||||
|
@ -113,11 +108,10 @@ public class SparkPropagateRelation extends AbstractSparkAction {
|
||||||
MapFunction<Tuple2<Tuple2<String, Relation>, Tuple2<String, String>>, Relation> mapFn) {
|
MapFunction<Tuple2<Tuple2<String, Relation>, Tuple2<String, String>>, Relation> mapFn) {
|
||||||
final Dataset<Tuple2<String, Relation>> mapped =
|
final Dataset<Tuple2<String, Relation>> mapped =
|
||||||
rels.map(
|
rels.map(
|
||||||
(MapFunction<Relation, Tuple2<String, Relation>>)
|
(MapFunction<Relation, Tuple2<String, Relation>>) r -> new Tuple2<>(getId(r, type), r),
|
||||||
r -> new Tuple2<>(getId(r, type), r),
|
|
||||||
Encoders.tuple(Encoders.STRING(), Encoders.kryo(Relation.class)));
|
Encoders.tuple(Encoders.STRING(), Encoders.kryo(Relation.class)));
|
||||||
return mapped.joinWith(
|
return mapped
|
||||||
mergedIds, mapped.col("_1").equalTo(mergedIds.col("_1")), "left_outer")
|
.joinWith(mergedIds, mapped.col("_1").equalTo(mergedIds.col("_1")), "left_outer")
|
||||||
.map(mapFn, Encoders.bean(Relation.class));
|
.map(mapFn, Encoders.bean(Relation.class));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -74,16 +74,14 @@ public class SparkUpdateEntity extends AbstractSparkAction {
|
||||||
removeOutputDir(spark, outputPath);
|
removeOutputDir(spark, outputPath);
|
||||||
|
|
||||||
JavaRDD<String> sourceEntity =
|
JavaRDD<String> sourceEntity =
|
||||||
sc.textFile(
|
sc.textFile(DedupUtility.createEntityPath(graphBasePath, type.toString()));
|
||||||
DedupUtility.createEntityPath(graphBasePath, type.toString()));
|
|
||||||
|
|
||||||
if (mergeRelExists(workingPath, type.toString())) {
|
if (mergeRelExists(workingPath, type.toString())) {
|
||||||
|
|
||||||
final String mergeRelPath =
|
final String mergeRelPath =
|
||||||
DedupUtility.createMergeRelPath(workingPath, "*", type.toString());
|
DedupUtility.createMergeRelPath(workingPath, "*", type.toString());
|
||||||
final String dedupRecordPath =
|
final String dedupRecordPath =
|
||||||
DedupUtility.createDedupRecordPath(
|
DedupUtility.createDedupRecordPath(workingPath, "*", type.toString());
|
||||||
workingPath, "*", type.toString());
|
|
||||||
|
|
||||||
final Dataset<Relation> rel =
|
final Dataset<Relation> rel =
|
||||||
spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));
|
spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));
|
||||||
|
@ -94,25 +92,19 @@ public class SparkUpdateEntity extends AbstractSparkAction {
|
||||||
.distinct()
|
.distinct()
|
||||||
.toJavaRDD()
|
.toJavaRDD()
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
(PairFunction<Row, String, String>)
|
(PairFunction<Row, String, String>) r -> new Tuple2<>(r.getString(0), "d"));
|
||||||
r -> new Tuple2<>(r.getString(0), "d"));
|
|
||||||
|
|
||||||
JavaPairRDD<String, String> entitiesWithId =
|
JavaPairRDD<String, String> entitiesWithId =
|
||||||
sourceEntity.mapToPair(
|
sourceEntity.mapToPair(
|
||||||
(PairFunction<String, String, String>)
|
(PairFunction<String, String, String>)
|
||||||
s ->
|
s -> new Tuple2<>(MapDocumentUtil.getJPathString(IDJSONPATH, s), s));
|
||||||
new Tuple2<>(
|
|
||||||
MapDocumentUtil.getJPathString(
|
|
||||||
IDJSONPATH, s),
|
|
||||||
s));
|
|
||||||
JavaRDD<String> map =
|
JavaRDD<String> map =
|
||||||
entitiesWithId
|
entitiesWithId
|
||||||
.leftOuterJoin(mergedIds)
|
.leftOuterJoin(mergedIds)
|
||||||
.map(
|
.map(
|
||||||
k ->
|
k ->
|
||||||
k._2()._2().isPresent()
|
k._2()._2().isPresent()
|
||||||
? updateDeletedByInference(
|
? updateDeletedByInference(k._2()._1(), clazz)
|
||||||
k._2()._1(), clazz)
|
|
||||||
: k._2()._1());
|
: k._2()._1());
|
||||||
|
|
||||||
sourceEntity = map.union(sc.textFile(dedupRecordPath));
|
sourceEntity = map.union(sc.textFile(dedupRecordPath));
|
||||||
|
@ -133,9 +125,7 @@ public class SparkUpdateEntity extends AbstractSparkAction {
|
||||||
for (FileStatus fs : fileStatuses) {
|
for (FileStatus fs : fileStatuses) {
|
||||||
if (fs.isDirectory())
|
if (fs.isDirectory())
|
||||||
if (fileSystem.exists(
|
if (fileSystem.exists(
|
||||||
new Path(
|
new Path(DedupUtility.createMergeRelPath(basePath, fs.getPath().getName(), entity))))
|
||||||
DedupUtility.createMergeRelPath(
|
|
||||||
basePath, fs.getPath().getName(), entity))))
|
|
||||||
result = true;
|
result = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -37,9 +37,7 @@ public class Block implements Serializable {
|
||||||
block.setDocuments(
|
block.setDocuments(
|
||||||
StreamSupport.stream(it.spliterator(), false)
|
StreamSupport.stream(it.spliterator(), false)
|
||||||
.flatMap(b -> b.getDocuments().stream())
|
.flatMap(b -> b.getDocuments().stream())
|
||||||
.sorted(
|
.sorted(Comparator.comparing(a -> a.getFieldMap().get(orderField).stringValue()))
|
||||||
Comparator.comparing(
|
|
||||||
a -> a.getFieldMap().get(orderField).stringValue()))
|
|
||||||
.limit(maxSize)
|
.limit(maxSize)
|
||||||
.collect(Collectors.toCollection(ArrayList::new)));
|
.collect(Collectors.toCollection(ArrayList::new)));
|
||||||
return block;
|
return block;
|
||||||
|
@ -50,9 +48,7 @@ public class Block implements Serializable {
|
||||||
block.setKey(b1.getKey());
|
block.setKey(b1.getKey());
|
||||||
block.setDocuments(
|
block.setDocuments(
|
||||||
Stream.concat(b1.getDocuments().stream(), b2.getDocuments().stream())
|
Stream.concat(b1.getDocuments().stream(), b2.getDocuments().stream())
|
||||||
.sorted(
|
.sorted(Comparator.comparing(a -> a.getFieldMap().get(orderField).stringValue()))
|
||||||
Comparator.comparing(
|
|
||||||
a -> a.getFieldMap().get(orderField).stringValue()))
|
|
||||||
.limit(maxSize)
|
.limit(maxSize)
|
||||||
.collect(Collectors.toCollection(ArrayList::new)));
|
.collect(Collectors.toCollection(ArrayList::new)));
|
||||||
|
|
||||||
|
|
|
@ -18,9 +18,7 @@ public class MergeAuthorTest {
|
||||||
public void setUp() throws Exception {
|
public void setUp() throws Exception {
|
||||||
final String json =
|
final String json =
|
||||||
IOUtils.toString(
|
IOUtils.toString(
|
||||||
this.getClass()
|
this.getClass().getResourceAsStream("/eu/dnetlib/dhp/dedup/json/authors_merge.json"));
|
||||||
.getResourceAsStream(
|
|
||||||
"/eu/dnetlib/dhp/dedup/json/authors_merge.json"));
|
|
||||||
|
|
||||||
publicationsToMerge =
|
publicationsToMerge =
|
||||||
Arrays.asList(json.split("\n")).stream()
|
Arrays.asList(json.split("\n")).stream()
|
||||||
|
|
|
@ -51,20 +51,13 @@ public class SparkDedupTest implements Serializable {
|
||||||
public static void cleanUp() throws IOException, URISyntaxException {
|
public static void cleanUp() throws IOException, URISyntaxException {
|
||||||
|
|
||||||
testGraphBasePath =
|
testGraphBasePath =
|
||||||
Paths.get(
|
Paths.get(SparkDedupTest.class.getResource("/eu/dnetlib/dhp/dedup/entities").toURI())
|
||||||
SparkDedupTest.class
|
|
||||||
.getResource("/eu/dnetlib/dhp/dedup/entities")
|
|
||||||
.toURI())
|
|
||||||
.toFile()
|
.toFile()
|
||||||
.getAbsolutePath();
|
.getAbsolutePath();
|
||||||
testOutputBasePath =
|
testOutputBasePath =
|
||||||
createTempDirectory(SparkDedupTest.class.getSimpleName() + "-")
|
createTempDirectory(SparkDedupTest.class.getSimpleName() + "-").toAbsolutePath().toString();
|
||||||
.toAbsolutePath()
|
|
||||||
.toString();
|
|
||||||
testDedupGraphBasePath =
|
testDedupGraphBasePath =
|
||||||
createTempDirectory(SparkDedupTest.class.getSimpleName() + "-")
|
createTempDirectory(SparkDedupTest.class.getSimpleName() + "-").toAbsolutePath().toString();
|
||||||
.toAbsolutePath()
|
|
||||||
.toString();
|
|
||||||
|
|
||||||
FileUtils.deleteDirectory(new File(testOutputBasePath));
|
FileUtils.deleteDirectory(new File(testOutputBasePath));
|
||||||
FileUtils.deleteDirectory(new File(testDedupGraphBasePath));
|
FileUtils.deleteDirectory(new File(testDedupGraphBasePath));
|
||||||
|
@ -118,9 +111,7 @@ public class SparkDedupTest implements Serializable {
|
||||||
"/eu/dnetlib/dhp/dedup/conf/ds.curr.conf.json")));
|
"/eu/dnetlib/dhp/dedup/conf/ds.curr.conf.json")));
|
||||||
|
|
||||||
lenient()
|
lenient()
|
||||||
.when(
|
.when(isLookUpService.getResourceProfileByQuery(Mockito.contains("otherresearchproduct")))
|
||||||
isLookUpService.getResourceProfileByQuery(
|
|
||||||
Mockito.contains("otherresearchproduct")))
|
|
||||||
.thenReturn(
|
.thenReturn(
|
||||||
IOUtils.toString(
|
IOUtils.toString(
|
||||||
SparkDedupTest.class.getResourceAsStream(
|
SparkDedupTest.class.getResourceAsStream(
|
||||||
|
@ -138,46 +129,45 @@ public class SparkDedupTest implements Serializable {
|
||||||
"/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
|
"/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
|
||||||
parser.parseArgument(
|
parser.parseArgument(
|
||||||
new String[] {
|
new String[] {
|
||||||
"-i", testGraphBasePath,
|
"-i",
|
||||||
"-asi", testActionSetId,
|
testGraphBasePath,
|
||||||
"-la", "lookupurl",
|
"-asi",
|
||||||
"-w", testOutputBasePath
|
testActionSetId,
|
||||||
|
"-la",
|
||||||
|
"lookupurl",
|
||||||
|
"-w",
|
||||||
|
testOutputBasePath
|
||||||
});
|
});
|
||||||
|
|
||||||
new SparkCreateSimRels(parser, spark).run(isLookUpService);
|
new SparkCreateSimRels(parser, spark).run(isLookUpService);
|
||||||
|
|
||||||
long orgs_simrel =
|
long orgs_simrel =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/organization_simrel")
|
.load(testOutputBasePath + "/" + testActionSetId + "/organization_simrel")
|
||||||
.count();
|
.count();
|
||||||
long pubs_simrel =
|
long pubs_simrel =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/publication_simrel")
|
.load(testOutputBasePath + "/" + testActionSetId + "/publication_simrel")
|
||||||
.count();
|
.count();
|
||||||
long sw_simrel =
|
long sw_simrel =
|
||||||
spark.read()
|
spark.read().load(testOutputBasePath + "/" + testActionSetId + "/software_simrel").count();
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/software_simrel")
|
|
||||||
.count();
|
|
||||||
|
|
||||||
long ds_simrel =
|
long ds_simrel =
|
||||||
spark.read()
|
spark.read().load(testOutputBasePath + "/" + testActionSetId + "/dataset_simrel").count();
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/dataset_simrel")
|
|
||||||
.count();
|
|
||||||
|
|
||||||
long orp_simrel =
|
long orp_simrel =
|
||||||
spark.read()
|
spark
|
||||||
.load(
|
.read()
|
||||||
testOutputBasePath
|
.load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_simrel")
|
||||||
+ "/"
|
|
||||||
+ testActionSetId
|
|
||||||
+ "/otherresearchproduct_simrel")
|
|
||||||
.count();
|
.count();
|
||||||
|
|
||||||
assertEquals(3432, orgs_simrel);
|
assertEquals(3432, orgs_simrel);
|
||||||
assertEquals(7260, pubs_simrel);
|
assertEquals(7152, pubs_simrel);
|
||||||
assertEquals(344, sw_simrel);
|
assertEquals(344, sw_simrel);
|
||||||
assertEquals(458, ds_simrel);
|
assertEquals(458, ds_simrel);
|
||||||
assertEquals(6740, orp_simrel);
|
assertEquals(6750, orp_simrel);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -191,46 +181,48 @@ public class SparkDedupTest implements Serializable {
|
||||||
"/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
|
"/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
|
||||||
parser.parseArgument(
|
parser.parseArgument(
|
||||||
new String[] {
|
new String[] {
|
||||||
"-i", testGraphBasePath,
|
"-i",
|
||||||
"-asi", testActionSetId,
|
testGraphBasePath,
|
||||||
"-la", "lookupurl",
|
"-asi",
|
||||||
"-w", testOutputBasePath
|
testActionSetId,
|
||||||
|
"-la",
|
||||||
|
"lookupurl",
|
||||||
|
"-w",
|
||||||
|
testOutputBasePath
|
||||||
});
|
});
|
||||||
|
|
||||||
new SparkCreateMergeRels(parser, spark).run(isLookUpService);
|
new SparkCreateMergeRels(parser, spark).run(isLookUpService);
|
||||||
|
|
||||||
long orgs_mergerel =
|
long orgs_mergerel =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
|
.load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
|
||||||
.count();
|
.count();
|
||||||
long pubs_mergerel =
|
long pubs_mergerel =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
|
.load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
|
||||||
.count();
|
.count();
|
||||||
long sw_mergerel =
|
long sw_mergerel =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
|
.load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
|
||||||
.count();
|
.count();
|
||||||
|
|
||||||
long ds_mergerel =
|
long ds_mergerel =
|
||||||
spark.read()
|
spark.read().load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel").count();
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel")
|
|
||||||
.count();
|
|
||||||
|
|
||||||
long orp_mergerel =
|
long orp_mergerel =
|
||||||
spark.read()
|
spark
|
||||||
.load(
|
.read()
|
||||||
testOutputBasePath
|
.load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_mergerel")
|
||||||
+ "/"
|
|
||||||
+ testActionSetId
|
|
||||||
+ "/otherresearchproduct_mergerel")
|
|
||||||
.count();
|
.count();
|
||||||
|
|
||||||
assertEquals(1276, orgs_mergerel);
|
assertEquals(1276, orgs_mergerel);
|
||||||
assertEquals(1460, pubs_mergerel);
|
assertEquals(1442, pubs_mergerel);
|
||||||
assertEquals(288, sw_mergerel);
|
assertEquals(288, sw_mergerel);
|
||||||
assertEquals(472, ds_mergerel);
|
assertEquals(472, ds_mergerel);
|
||||||
assertEquals(714, orp_mergerel);
|
assertEquals(718, orp_mergerel);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -244,40 +236,31 @@ public class SparkDedupTest implements Serializable {
|
||||||
"/eu/dnetlib/dhp/oa/dedup/createDedupRecord_parameters.json")));
|
"/eu/dnetlib/dhp/oa/dedup/createDedupRecord_parameters.json")));
|
||||||
parser.parseArgument(
|
parser.parseArgument(
|
||||||
new String[] {
|
new String[] {
|
||||||
"-i", testGraphBasePath,
|
"-i",
|
||||||
"-asi", testActionSetId,
|
testGraphBasePath,
|
||||||
"-la", "lookupurl",
|
"-asi",
|
||||||
"-w", testOutputBasePath
|
testActionSetId,
|
||||||
|
"-la",
|
||||||
|
"lookupurl",
|
||||||
|
"-w",
|
||||||
|
testOutputBasePath
|
||||||
});
|
});
|
||||||
|
|
||||||
new SparkCreateDedupRecord(parser, spark).run(isLookUpService);
|
new SparkCreateDedupRecord(parser, spark).run(isLookUpService);
|
||||||
|
|
||||||
long orgs_deduprecord =
|
long orgs_deduprecord =
|
||||||
jsc.textFile(
|
jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/organization_deduprecord")
|
||||||
testOutputBasePath
|
|
||||||
+ "/"
|
|
||||||
+ testActionSetId
|
|
||||||
+ "/organization_deduprecord")
|
|
||||||
.count();
|
.count();
|
||||||
long pubs_deduprecord =
|
long pubs_deduprecord =
|
||||||
jsc.textFile(
|
jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/publication_deduprecord")
|
||||||
testOutputBasePath
|
|
||||||
+ "/"
|
|
||||||
+ testActionSetId
|
|
||||||
+ "/publication_deduprecord")
|
|
||||||
.count();
|
.count();
|
||||||
long sw_deduprecord =
|
long sw_deduprecord =
|
||||||
jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/software_deduprecord")
|
jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/software_deduprecord").count();
|
||||||
.count();
|
|
||||||
long ds_deduprecord =
|
long ds_deduprecord =
|
||||||
jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/dataset_deduprecord")
|
jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/dataset_deduprecord").count();
|
||||||
.count();
|
|
||||||
long orp_deduprecord =
|
long orp_deduprecord =
|
||||||
jsc.textFile(
|
jsc.textFile(
|
||||||
testOutputBasePath
|
testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_deduprecord")
|
||||||
+ "/"
|
|
||||||
+ testActionSetId
|
|
||||||
+ "/otherresearchproduct_deduprecord")
|
|
||||||
.count();
|
.count();
|
||||||
|
|
||||||
assertEquals(82, orgs_deduprecord);
|
assertEquals(82, orgs_deduprecord);
|
||||||
|
@ -298,9 +281,7 @@ public class SparkDedupTest implements Serializable {
|
||||||
"/eu/dnetlib/dhp/oa/dedup/updateEntity_parameters.json")));
|
"/eu/dnetlib/dhp/oa/dedup/updateEntity_parameters.json")));
|
||||||
parser.parseArgument(
|
parser.parseArgument(
|
||||||
new String[] {
|
new String[] {
|
||||||
"-i", testGraphBasePath,
|
"-i", testGraphBasePath, "-w", testOutputBasePath, "-o", testDedupGraphBasePath
|
||||||
"-w", testOutputBasePath,
|
|
||||||
"-o", testDedupGraphBasePath
|
|
||||||
});
|
});
|
||||||
|
|
||||||
new SparkUpdateEntity(parser, spark).run(isLookUpService);
|
new SparkUpdateEntity(parser, spark).run(isLookUpService);
|
||||||
|
@ -315,7 +296,8 @@ public class SparkDedupTest implements Serializable {
|
||||||
jsc.textFile(testDedupGraphBasePath + "/otherresearchproduct").count();
|
jsc.textFile(testDedupGraphBasePath + "/otherresearchproduct").count();
|
||||||
|
|
||||||
long mergedOrgs =
|
long mergedOrgs =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
|
.load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
|
||||||
.as(Encoders.bean(Relation.class))
|
.as(Encoders.bean(Relation.class))
|
||||||
.where("relClass=='merges'")
|
.where("relClass=='merges'")
|
||||||
|
@ -325,7 +307,8 @@ public class SparkDedupTest implements Serializable {
|
||||||
.count();
|
.count();
|
||||||
|
|
||||||
long mergedPubs =
|
long mergedPubs =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
|
.load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
|
||||||
.as(Encoders.bean(Relation.class))
|
.as(Encoders.bean(Relation.class))
|
||||||
.where("relClass=='merges'")
|
.where("relClass=='merges'")
|
||||||
|
@ -335,7 +318,8 @@ public class SparkDedupTest implements Serializable {
|
||||||
.count();
|
.count();
|
||||||
|
|
||||||
long mergedSw =
|
long mergedSw =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
|
.load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
|
||||||
.as(Encoders.bean(Relation.class))
|
.as(Encoders.bean(Relation.class))
|
||||||
.where("relClass=='merges'")
|
.where("relClass=='merges'")
|
||||||
|
@ -345,7 +329,8 @@ public class SparkDedupTest implements Serializable {
|
||||||
.count();
|
.count();
|
||||||
|
|
||||||
long mergedDs =
|
long mergedDs =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel")
|
.load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel")
|
||||||
.as(Encoders.bean(Relation.class))
|
.as(Encoders.bean(Relation.class))
|
||||||
.where("relClass=='merges'")
|
.where("relClass=='merges'")
|
||||||
|
@ -355,12 +340,9 @@ public class SparkDedupTest implements Serializable {
|
||||||
.count();
|
.count();
|
||||||
|
|
||||||
long mergedOrp =
|
long mergedOrp =
|
||||||
spark.read()
|
spark
|
||||||
.load(
|
.read()
|
||||||
testOutputBasePath
|
.load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_mergerel")
|
||||||
+ "/"
|
|
||||||
+ testActionSetId
|
|
||||||
+ "/otherresearchproduct_mergerel")
|
|
||||||
.as(Encoders.bean(Relation.class))
|
.as(Encoders.bean(Relation.class))
|
||||||
.where("relClass=='merges'")
|
.where("relClass=='merges'")
|
||||||
.javaRDD()
|
.javaRDD()
|
||||||
|
@ -419,9 +401,7 @@ public class SparkDedupTest implements Serializable {
|
||||||
"/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
|
"/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
|
||||||
parser.parseArgument(
|
parser.parseArgument(
|
||||||
new String[] {
|
new String[] {
|
||||||
"-i", testGraphBasePath,
|
"-i", testGraphBasePath, "-w", testOutputBasePath, "-o", testDedupGraphBasePath
|
||||||
"-w", testOutputBasePath,
|
|
||||||
"-o", testDedupGraphBasePath
|
|
||||||
});
|
});
|
||||||
|
|
||||||
new SparkPropagateRelation(parser, spark).run(isLookUpService);
|
new SparkPropagateRelation(parser, spark).run(isLookUpService);
|
||||||
|
@ -432,7 +412,8 @@ public class SparkDedupTest implements Serializable {
|
||||||
|
|
||||||
// check deletedbyinference
|
// check deletedbyinference
|
||||||
final Dataset<Relation> mergeRels =
|
final Dataset<Relation> mergeRels =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(DedupUtility.createMergeRelPath(testOutputBasePath, "*", "*"))
|
.load(DedupUtility.createMergeRelPath(testOutputBasePath, "*", "*"))
|
||||||
.as(Encoders.bean(Relation.class));
|
.as(Encoders.bean(Relation.class));
|
||||||
final JavaPairRDD<String, String> mergedIds =
|
final JavaPairRDD<String, String> mergedIds =
|
||||||
|
@ -447,18 +428,10 @@ public class SparkDedupTest implements Serializable {
|
||||||
|
|
||||||
JavaRDD<String> toCheck =
|
JavaRDD<String> toCheck =
|
||||||
jsc.textFile(testDedupGraphBasePath + "/relation")
|
jsc.textFile(testDedupGraphBasePath + "/relation")
|
||||||
.mapToPair(
|
.mapToPair(json -> new Tuple2<>(MapDocumentUtil.getJPathString("$.source", json), json))
|
||||||
json ->
|
|
||||||
new Tuple2<>(
|
|
||||||
MapDocumentUtil.getJPathString("$.source", json),
|
|
||||||
json))
|
|
||||||
.join(mergedIds)
|
.join(mergedIds)
|
||||||
.map(t -> t._2()._1())
|
.map(t -> t._2()._1())
|
||||||
.mapToPair(
|
.mapToPair(json -> new Tuple2<>(MapDocumentUtil.getJPathString("$.target", json), json))
|
||||||
json ->
|
|
||||||
new Tuple2<>(
|
|
||||||
MapDocumentUtil.getJPathString("$.target", json),
|
|
||||||
json))
|
|
||||||
.join(mergedIds)
|
.join(mergedIds)
|
||||||
.map(t -> t._2()._1());
|
.map(t -> t._2()._1());
|
||||||
|
|
||||||
|
|
|
@ -42,11 +42,7 @@ public class DatePicker {
|
||||||
.filter(d -> inRange(d.getKey()))
|
.filter(d -> inRange(d.getKey()))
|
||||||
.sorted(reverseOrder(comparingByValue()))
|
.sorted(reverseOrder(comparingByValue()))
|
||||||
.collect(
|
.collect(
|
||||||
toMap(
|
toMap(Map.Entry::getKey, Map.Entry::getValue, (e1, e2) -> e2, LinkedHashMap::new));
|
||||||
Map.Entry::getKey,
|
|
||||||
Map.Entry::getValue,
|
|
||||||
(e1, e2) -> e2,
|
|
||||||
LinkedHashMap::new));
|
|
||||||
|
|
||||||
// shortcut
|
// shortcut
|
||||||
if (sorted.size() == 0) {
|
if (sorted.size() == 0) {
|
||||||
|
@ -67,11 +63,7 @@ public class DatePicker {
|
||||||
final int max = sorted.values().iterator().next();
|
final int max = sorted.values().iterator().next();
|
||||||
Optional<String> first =
|
Optional<String> first =
|
||||||
sorted.entrySet().stream()
|
sorted.entrySet().stream()
|
||||||
.filter(
|
.filter(e -> e.getValue() == max && !endsWith(e.getKey(), DATE_DEFAULT_SUFFIX))
|
||||||
e ->
|
|
||||||
e.getValue() == max
|
|
||||||
&& !endsWith(
|
|
||||||
e.getKey(), DATE_DEFAULT_SUFFIX))
|
|
||||||
.map(Map.Entry::getKey)
|
.map(Map.Entry::getKey)
|
||||||
.findFirst();
|
.findFirst();
|
||||||
if (first.isPresent()) {
|
if (first.isPresent()) {
|
||||||
|
@ -88,9 +80,7 @@ public class DatePicker {
|
||||||
return date;
|
return date;
|
||||||
} else {
|
} else {
|
||||||
final Optional<String> first =
|
final Optional<String> first =
|
||||||
accepted.stream()
|
accepted.stream().filter(d -> !endsWith(d, DATE_DEFAULT_SUFFIX)).findFirst();
|
||||||
.filter(d -> !endsWith(d, DATE_DEFAULT_SUFFIX))
|
|
||||||
.findFirst();
|
|
||||||
if (first.isPresent()) {
|
if (first.isPresent()) {
|
||||||
date.setValue(first.get());
|
date.setValue(first.get());
|
||||||
return date;
|
return date;
|
||||||
|
|
|
@ -32,33 +32,26 @@ public class DedupRecordFactory {
|
||||||
(PairFunction<String, String, String>)
|
(PairFunction<String, String, String>)
|
||||||
it ->
|
it ->
|
||||||
new Tuple2<String, String>(
|
new Tuple2<String, String>(
|
||||||
MapDocumentUtil.getJPathString(
|
MapDocumentUtil.getJPathString(dedupConf.getWf().getIdPath(), it), it));
|
||||||
dedupConf.getWf().getIdPath(), it),
|
|
||||||
it));
|
|
||||||
|
|
||||||
// <source, target>: source is the dedup_id, target is the id of the mergedIn
|
// <source, target>: source is the dedup_id, target is the id of the mergedIn
|
||||||
JavaPairRDD<String, String> mergeRels =
|
JavaPairRDD<String, String> mergeRels =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(mergeRelsInputPath)
|
.load(mergeRelsInputPath)
|
||||||
.as(Encoders.bean(Relation.class))
|
.as(Encoders.bean(Relation.class))
|
||||||
.where("relClass=='merges'")
|
.where("relClass=='merges'")
|
||||||
.javaRDD()
|
.javaRDD()
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
(PairFunction<Relation, String, String>)
|
(PairFunction<Relation, String, String>)
|
||||||
r ->
|
r -> new Tuple2<String, String>(r.getTarget(), r.getSource()));
|
||||||
new Tuple2<String, String>(
|
|
||||||
r.getTarget(), r.getSource()));
|
|
||||||
|
|
||||||
// <dedup_id, json_entity_merged>
|
// <dedup_id, json_entity_merged>
|
||||||
final JavaPairRDD<String, String> joinResult =
|
final JavaPairRDD<String, String> joinResult =
|
||||||
mergeRels
|
mergeRels
|
||||||
.join(inputJsonEntities)
|
.join(inputJsonEntities)
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
(PairFunction<
|
(PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>) Tuple2::_2);
|
||||||
Tuple2<String, Tuple2<String, String>>,
|
|
||||||
String,
|
|
||||||
String>)
|
|
||||||
Tuple2::_2);
|
|
||||||
|
|
||||||
JavaPairRDD<String, Iterable<String>> sortedJoinResult = joinResult.groupByKey();
|
JavaPairRDD<String, Iterable<String>> sortedJoinResult = joinResult.groupByKey();
|
||||||
|
|
||||||
|
@ -76,15 +69,13 @@ public class DedupRecordFactory {
|
||||||
case organization:
|
case organization:
|
||||||
return sortedJoinResult.map(o -> DedupRecordFactory.organizationMerger(o, ts));
|
return sortedJoinResult.map(o -> DedupRecordFactory.organizationMerger(o, ts));
|
||||||
case otherresearchproduct:
|
case otherresearchproduct:
|
||||||
return sortedJoinResult.map(
|
return sortedJoinResult.map(o -> DedupRecordFactory.otherresearchproductMerger(o, ts));
|
||||||
o -> DedupRecordFactory.otherresearchproductMerger(o, ts));
|
|
||||||
default:
|
default:
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Publication publicationMerger(
|
private static Publication publicationMerger(Tuple2<String, Iterable<String>> e, final long ts) {
|
||||||
Tuple2<String, Iterable<String>> e, final long ts) {
|
|
||||||
|
|
||||||
Publication p = new Publication(); // the result of the merge, to be returned at the end
|
Publication p = new Publication(); // the result of the merge, to be returned at the end
|
||||||
|
|
||||||
|
@ -96,20 +87,17 @@ public class DedupRecordFactory {
|
||||||
final Collection<String> dateofacceptance = Lists.newArrayList();
|
final Collection<String> dateofacceptance = Lists.newArrayList();
|
||||||
|
|
||||||
if (e._2() != null)
|
if (e._2() != null)
|
||||||
e._2().forEach(
|
e._2()
|
||||||
|
.forEach(
|
||||||
pub -> {
|
pub -> {
|
||||||
try {
|
try {
|
||||||
Publication publication =
|
Publication publication = mapper.readValue(pub, Publication.class);
|
||||||
mapper.readValue(pub, Publication.class);
|
|
||||||
|
|
||||||
p.mergeFrom(publication);
|
p.mergeFrom(publication);
|
||||||
p.setAuthor(
|
p.setAuthor(DedupUtility.mergeAuthor(p.getAuthor(), publication.getAuthor()));
|
||||||
DedupUtility.mergeAuthor(
|
|
||||||
p.getAuthor(), publication.getAuthor()));
|
|
||||||
// add to the list if they are not null
|
// add to the list if they are not null
|
||||||
if (publication.getDateofacceptance() != null)
|
if (publication.getDateofacceptance() != null)
|
||||||
dateofacceptance.add(
|
dateofacceptance.add(publication.getDateofacceptance().getValue());
|
||||||
publication.getDateofacceptance().getValue());
|
|
||||||
} catch (Exception exc) {
|
} catch (Exception exc) {
|
||||||
throw new RuntimeException(exc);
|
throw new RuntimeException(exc);
|
||||||
}
|
}
|
||||||
|
@ -133,19 +121,17 @@ public class DedupRecordFactory {
|
||||||
final Collection<String> dateofacceptance = Lists.newArrayList();
|
final Collection<String> dateofacceptance = Lists.newArrayList();
|
||||||
|
|
||||||
if (e._2() != null)
|
if (e._2() != null)
|
||||||
e._2().forEach(
|
e._2()
|
||||||
|
.forEach(
|
||||||
dat -> {
|
dat -> {
|
||||||
try {
|
try {
|
||||||
Dataset dataset = mapper.readValue(dat, Dataset.class);
|
Dataset dataset = mapper.readValue(dat, Dataset.class);
|
||||||
|
|
||||||
d.mergeFrom(dataset);
|
d.mergeFrom(dataset);
|
||||||
d.setAuthor(
|
d.setAuthor(DedupUtility.mergeAuthor(d.getAuthor(), dataset.getAuthor()));
|
||||||
DedupUtility.mergeAuthor(
|
|
||||||
d.getAuthor(), dataset.getAuthor()));
|
|
||||||
// add to the list if they are not null
|
// add to the list if they are not null
|
||||||
if (dataset.getDateofacceptance() != null)
|
if (dataset.getDateofacceptance() != null)
|
||||||
dateofacceptance.add(
|
dateofacceptance.add(dataset.getDateofacceptance().getValue());
|
||||||
dataset.getDateofacceptance().getValue());
|
|
||||||
} catch (Exception exc) {
|
} catch (Exception exc) {
|
||||||
throw new RuntimeException(exc);
|
throw new RuntimeException(exc);
|
||||||
}
|
}
|
||||||
|
@ -166,7 +152,8 @@ public class DedupRecordFactory {
|
||||||
final ObjectMapper mapper = new ObjectMapper();
|
final ObjectMapper mapper = new ObjectMapper();
|
||||||
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||||
if (e._2() != null)
|
if (e._2() != null)
|
||||||
e._2().forEach(
|
e._2()
|
||||||
|
.forEach(
|
||||||
proj -> {
|
proj -> {
|
||||||
try {
|
try {
|
||||||
Project project = mapper.readValue(proj, Project.class);
|
Project project = mapper.readValue(proj, Project.class);
|
||||||
|
@ -191,19 +178,17 @@ public class DedupRecordFactory {
|
||||||
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||||
final Collection<String> dateofacceptance = Lists.newArrayList();
|
final Collection<String> dateofacceptance = Lists.newArrayList();
|
||||||
if (e._2() != null)
|
if (e._2() != null)
|
||||||
e._2().forEach(
|
e._2()
|
||||||
|
.forEach(
|
||||||
soft -> {
|
soft -> {
|
||||||
try {
|
try {
|
||||||
Software software = mapper.readValue(soft, Software.class);
|
Software software = mapper.readValue(soft, Software.class);
|
||||||
|
|
||||||
s.mergeFrom(software);
|
s.mergeFrom(software);
|
||||||
s.setAuthor(
|
s.setAuthor(DedupUtility.mergeAuthor(s.getAuthor(), software.getAuthor()));
|
||||||
DedupUtility.mergeAuthor(
|
|
||||||
s.getAuthor(), software.getAuthor()));
|
|
||||||
// add to the list if they are not null
|
// add to the list if they are not null
|
||||||
if (software.getDateofacceptance() != null)
|
if (software.getDateofacceptance() != null)
|
||||||
dateofacceptance.add(
|
dateofacceptance.add(software.getDateofacceptance().getValue());
|
||||||
software.getDateofacceptance().getValue());
|
|
||||||
} catch (Exception exc) {
|
} catch (Exception exc) {
|
||||||
throw new RuntimeException(exc);
|
throw new RuntimeException(exc);
|
||||||
}
|
}
|
||||||
|
@ -221,7 +206,8 @@ public class DedupRecordFactory {
|
||||||
final ObjectMapper mapper = new ObjectMapper();
|
final ObjectMapper mapper = new ObjectMapper();
|
||||||
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||||
if (e._2() != null)
|
if (e._2() != null)
|
||||||
e._2().forEach(
|
e._2()
|
||||||
|
.forEach(
|
||||||
dat -> {
|
dat -> {
|
||||||
try {
|
try {
|
||||||
Datasource datasource = mapper.readValue(dat, Datasource.class);
|
Datasource datasource = mapper.readValue(dat, Datasource.class);
|
||||||
|
@ -250,14 +236,13 @@ public class DedupRecordFactory {
|
||||||
StringBuilder trust = new StringBuilder("0.0");
|
StringBuilder trust = new StringBuilder("0.0");
|
||||||
|
|
||||||
if (e._2() != null)
|
if (e._2() != null)
|
||||||
e._2().forEach(
|
e._2()
|
||||||
|
.forEach(
|
||||||
pub -> {
|
pub -> {
|
||||||
try {
|
try {
|
||||||
Organization organization =
|
Organization organization = mapper.readValue(pub, Organization.class);
|
||||||
mapper.readValue(pub, Organization.class);
|
|
||||||
|
|
||||||
final String currentTrust =
|
final String currentTrust = organization.getDataInfo().getTrust();
|
||||||
organization.getDataInfo().getTrust();
|
|
||||||
if (!"1.0".equals(currentTrust)) {
|
if (!"1.0".equals(currentTrust)) {
|
||||||
trust.setLength(0);
|
trust.setLength(0);
|
||||||
trust.append(currentTrust);
|
trust.append(currentTrust);
|
||||||
|
@ -282,8 +267,8 @@ public class DedupRecordFactory {
|
||||||
private static OtherResearchProduct otherresearchproductMerger(
|
private static OtherResearchProduct otherresearchproductMerger(
|
||||||
Tuple2<String, Iterable<String>> e, final long ts) {
|
Tuple2<String, Iterable<String>> e, final long ts) {
|
||||||
|
|
||||||
OtherResearchProduct o =
|
OtherResearchProduct o = new OtherResearchProduct(); // the result of the merge, to be
|
||||||
new OtherResearchProduct(); // the result of the merge, to be returned at the end
|
// returned at the end
|
||||||
|
|
||||||
o.setId(e._1());
|
o.setId(e._1());
|
||||||
|
|
||||||
|
@ -293,7 +278,8 @@ public class DedupRecordFactory {
|
||||||
final Collection<String> dateofacceptance = Lists.newArrayList();
|
final Collection<String> dateofacceptance = Lists.newArrayList();
|
||||||
|
|
||||||
if (e._2() != null)
|
if (e._2() != null)
|
||||||
e._2().forEach(
|
e._2()
|
||||||
|
.forEach(
|
||||||
orp -> {
|
orp -> {
|
||||||
try {
|
try {
|
||||||
OtherResearchProduct otherResearchProduct =
|
OtherResearchProduct otherResearchProduct =
|
||||||
|
@ -301,15 +287,10 @@ public class DedupRecordFactory {
|
||||||
|
|
||||||
o.mergeFrom(otherResearchProduct);
|
o.mergeFrom(otherResearchProduct);
|
||||||
o.setAuthor(
|
o.setAuthor(
|
||||||
DedupUtility.mergeAuthor(
|
DedupUtility.mergeAuthor(o.getAuthor(), otherResearchProduct.getAuthor()));
|
||||||
o.getAuthor(),
|
|
||||||
otherResearchProduct.getAuthor()));
|
|
||||||
// add to the list if they are not null
|
// add to the list if they are not null
|
||||||
if (otherResearchProduct.getDateofacceptance() != null)
|
if (otherResearchProduct.getDateofacceptance() != null)
|
||||||
dateofacceptance.add(
|
dateofacceptance.add(otherResearchProduct.getDateofacceptance().getValue());
|
||||||
otherResearchProduct
|
|
||||||
.getDateofacceptance()
|
|
||||||
.getValue());
|
|
||||||
} catch (Exception exc) {
|
} catch (Exception exc) {
|
||||||
throw new RuntimeException(exc);
|
throw new RuntimeException(exc);
|
||||||
}
|
}
|
||||||
|
|
|
@ -37,14 +37,12 @@ public class DedupUtility {
|
||||||
Map<String, LongAccumulator> accumulators = new HashMap<>();
|
Map<String, LongAccumulator> accumulators = new HashMap<>();
|
||||||
|
|
||||||
String acc1 =
|
String acc1 =
|
||||||
String.format(
|
String.format("%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1");
|
||||||
"%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1");
|
|
||||||
accumulators.put(acc1, context.longAccumulator(acc1));
|
accumulators.put(acc1, context.longAccumulator(acc1));
|
||||||
String acc2 =
|
String acc2 =
|
||||||
String.format(
|
String.format(
|
||||||
"%s::%s",
|
"%s::%s",
|
||||||
dedupConf.getWf().getEntityType(),
|
dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField());
|
||||||
"missing " + dedupConf.getWf().getOrderField());
|
|
||||||
accumulators.put(acc2, context.longAccumulator(acc2));
|
accumulators.put(acc2, context.longAccumulator(acc2));
|
||||||
String acc3 =
|
String acc3 =
|
||||||
String.format(
|
String.format(
|
||||||
|
@ -52,8 +50,7 @@ public class DedupUtility {
|
||||||
dedupConf.getWf().getEntityType(),
|
dedupConf.getWf().getEntityType(),
|
||||||
String.format(
|
String.format(
|
||||||
"Skipped records for count(%s) >= %s",
|
"Skipped records for count(%s) >= %s",
|
||||||
dedupConf.getWf().getOrderField(),
|
dedupConf.getWf().getOrderField(), dedupConf.getWf().getGroupMaxSize()));
|
||||||
dedupConf.getWf().getGroupMaxSize()));
|
|
||||||
accumulators.put(acc3, context.longAccumulator(acc3));
|
accumulators.put(acc3, context.longAccumulator(acc3));
|
||||||
String acc4 = String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list");
|
String acc4 = String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list");
|
||||||
accumulators.put(acc4, context.longAccumulator(acc4));
|
accumulators.put(acc4, context.longAccumulator(acc4));
|
||||||
|
@ -62,9 +59,7 @@ public class DedupUtility {
|
||||||
accumulators.put(acc5, context.longAccumulator(acc5));
|
accumulators.put(acc5, context.longAccumulator(acc5));
|
||||||
String acc6 =
|
String acc6 =
|
||||||
String.format(
|
String.format(
|
||||||
"%s::%s",
|
"%s::%s", dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold());
|
||||||
dedupConf.getWf().getEntityType(),
|
|
||||||
"d < " + dedupConf.getWf().getThreshold());
|
|
||||||
accumulators.put(acc6, context.longAccumulator(acc6));
|
accumulators.put(acc6, context.longAccumulator(acc6));
|
||||||
|
|
||||||
return accumulators;
|
return accumulators;
|
||||||
|
@ -139,10 +134,7 @@ public class DedupUtility {
|
||||||
final Map<String, Author> basePidAuthorMap =
|
final Map<String, Author> basePidAuthorMap =
|
||||||
base.stream()
|
base.stream()
|
||||||
.filter(a -> a.getPid() != null && a.getPid().size() > 0)
|
.filter(a -> a.getPid() != null && a.getPid().size() > 0)
|
||||||
.flatMap(
|
.flatMap(a -> a.getPid().stream().map(p -> new Tuple2<>(p.toComparableString(), a)))
|
||||||
a ->
|
|
||||||
a.getPid().stream()
|
|
||||||
.map(p -> new Tuple2<>(p.toComparableString(), a)))
|
|
||||||
.collect(Collectors.toMap(Tuple2::_1, Tuple2::_2, (x1, x2) -> x1));
|
.collect(Collectors.toMap(Tuple2::_1, Tuple2::_2, (x1, x2) -> x1));
|
||||||
|
|
||||||
final List<Tuple2<StructuredProperty, Author>> pidToEnrich =
|
final List<Tuple2<StructuredProperty, Author>> pidToEnrich =
|
||||||
|
@ -151,10 +143,7 @@ public class DedupUtility {
|
||||||
.flatMap(
|
.flatMap(
|
||||||
a ->
|
a ->
|
||||||
a.getPid().stream()
|
a.getPid().stream()
|
||||||
.filter(
|
.filter(p -> !basePidAuthorMap.containsKey(p.toComparableString()))
|
||||||
p ->
|
|
||||||
!basePidAuthorMap.containsKey(
|
|
||||||
p.toComparableString()))
|
|
||||||
.map(p -> new Tuple2<>(p, a)))
|
.map(p -> new Tuple2<>(p, a)))
|
||||||
.collect(Collectors.toList());
|
.collect(Collectors.toList());
|
||||||
|
|
||||||
|
@ -193,14 +182,13 @@ public class DedupUtility {
|
||||||
.score(normalize(pa.getSurnameString()), normalize(pb.getSurnameString()));
|
.score(normalize(pa.getSurnameString()), normalize(pb.getSurnameString()));
|
||||||
} else {
|
} else {
|
||||||
return new JaroWinkler()
|
return new JaroWinkler()
|
||||||
.score(
|
.score(normalize(pa.getNormalisedFullname()), normalize(pb.getNormalisedFullname()));
|
||||||
normalize(pa.getNormalisedFullname()),
|
|
||||||
normalize(pb.getNormalisedFullname()));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static String normalize(final String s) {
|
private static String normalize(final String s) {
|
||||||
return nfd(s).toLowerCase()
|
return nfd(s)
|
||||||
|
.toLowerCase()
|
||||||
// do not compact the regexes in a single expression, would cause StackOverflowError
|
// do not compact the regexes in a single expression, would cause StackOverflowError
|
||||||
// in case
|
// in case
|
||||||
// of large input strings
|
// of large input strings
|
||||||
|
|
|
@ -70,7 +70,8 @@ public class Deduper implements Serializable {
|
||||||
Map<String, LongAccumulator> accumulators =
|
Map<String, LongAccumulator> accumulators =
|
||||||
DedupUtility.constructAccumulator(config, context.sc());
|
DedupUtility.constructAccumulator(config, context.sc());
|
||||||
|
|
||||||
return blocks.flatMapToPair(
|
return blocks
|
||||||
|
.flatMapToPair(
|
||||||
(PairFlatMapFunction<Tuple2<String, Iterable<MapDocument>>, String, String>)
|
(PairFlatMapFunction<Tuple2<String, Iterable<MapDocument>>, String, String>)
|
||||||
it -> {
|
it -> {
|
||||||
final SparkReporter reporter = new SparkReporter(accumulators);
|
final SparkReporter reporter = new SparkReporter(accumulators);
|
||||||
|
@ -79,13 +80,10 @@ public class Deduper implements Serializable {
|
||||||
})
|
})
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
(PairFunction<Tuple2<String, String>, String, Tuple2<String, String>>)
|
(PairFunction<Tuple2<String, String>, String, Tuple2<String, String>>)
|
||||||
item ->
|
item -> new Tuple2<String, Tuple2<String, String>>(item._1() + item._2(), item))
|
||||||
new Tuple2<String, Tuple2<String, String>>(
|
|
||||||
item._1() + item._2(), item))
|
|
||||||
.reduceByKey((a, b) -> a)
|
.reduceByKey((a, b) -> a)
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
(PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>)
|
(PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>) Tuple2::_2);
|
||||||
Tuple2::_2);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -95,9 +93,7 @@ public class Deduper implements Serializable {
|
||||||
* @param: the dedup configuration
|
* @param: the dedup configuration
|
||||||
*/
|
*/
|
||||||
public static JavaPairRDD<String, Iterable<MapDocument>> createBlocks(
|
public static JavaPairRDD<String, Iterable<MapDocument>> createBlocks(
|
||||||
JavaSparkContext context,
|
JavaSparkContext context, JavaPairRDD<String, MapDocument> mapDocs, DedupConfig config) {
|
||||||
JavaPairRDD<String, MapDocument> mapDocs,
|
|
||||||
DedupConfig config) {
|
|
||||||
return mapDocs
|
return mapDocs
|
||||||
// the reduce is just to be sure that we haven't document with same id
|
// the reduce is just to be sure that we haven't document with same id
|
||||||
.reduceByKey((a, b) -> a)
|
.reduceByKey((a, b) -> a)
|
||||||
|
@ -114,9 +110,7 @@ public class Deduper implements Serializable {
|
||||||
}
|
}
|
||||||
|
|
||||||
public static JavaPairRDD<String, List<MapDocument>> createsortedBlocks(
|
public static JavaPairRDD<String, List<MapDocument>> createsortedBlocks(
|
||||||
JavaSparkContext context,
|
JavaSparkContext context, JavaPairRDD<String, MapDocument> mapDocs, DedupConfig config) {
|
||||||
JavaPairRDD<String, MapDocument> mapDocs,
|
|
||||||
DedupConfig config) {
|
|
||||||
final String of = config.getWf().getOrderField();
|
final String of = config.getWf().getOrderField();
|
||||||
final int maxQueueSize = config.getWf().getGroupMaxSize();
|
final int maxQueueSize = config.getWf().getGroupMaxSize();
|
||||||
return mapDocs
|
return mapDocs
|
||||||
|
@ -130,8 +124,7 @@ public class Deduper implements Serializable {
|
||||||
DedupUtility.getGroupingKeys(config, a).stream()
|
DedupUtility.getGroupingKeys(config, a).stream()
|
||||||
.map(
|
.map(
|
||||||
it -> {
|
it -> {
|
||||||
List<MapDocument> tmp =
|
List<MapDocument> tmp = new ArrayList<>();
|
||||||
new ArrayList<>();
|
|
||||||
tmp.add(a);
|
tmp.add(a);
|
||||||
return new Tuple2<>(it, tmp);
|
return new Tuple2<>(it, tmp);
|
||||||
})
|
})
|
||||||
|
@ -141,11 +134,8 @@ public class Deduper implements Serializable {
|
||||||
(Function2<List<MapDocument>, List<MapDocument>, List<MapDocument>>)
|
(Function2<List<MapDocument>, List<MapDocument>, List<MapDocument>>)
|
||||||
(v1, v2) -> {
|
(v1, v2) -> {
|
||||||
v1.addAll(v2);
|
v1.addAll(v2);
|
||||||
v1.sort(
|
v1.sort(Comparator.comparing(a -> a.getFieldMap().get(of).stringValue()));
|
||||||
Comparator.comparing(
|
if (v1.size() > maxQueueSize) return new ArrayList<>(v1.subList(0, maxQueueSize));
|
||||||
a -> a.getFieldMap().get(of).stringValue()));
|
|
||||||
if (v1.size() > maxQueueSize)
|
|
||||||
return new ArrayList<>(v1.subList(0, maxQueueSize));
|
|
||||||
return v1;
|
return v1;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -162,42 +152,33 @@ public class Deduper implements Serializable {
|
||||||
return entities.mapToPair(
|
return entities.mapToPair(
|
||||||
(PairFunction<String, String, MapDocument>)
|
(PairFunction<String, String, MapDocument>)
|
||||||
s -> {
|
s -> {
|
||||||
MapDocument mapDocument =
|
MapDocument mapDocument = MapDocumentUtil.asMapDocumentWithJPath(config, s);
|
||||||
MapDocumentUtil.asMapDocumentWithJPath(config, s);
|
return new Tuple2<String, MapDocument>(mapDocument.getIdentifier(), mapDocument);
|
||||||
return new Tuple2<String, MapDocument>(
|
|
||||||
mapDocument.getIdentifier(), mapDocument);
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
public static JavaPairRDD<String, String> computeRelations2(
|
public static JavaPairRDD<String, String> computeRelations2(
|
||||||
JavaSparkContext context,
|
JavaSparkContext context, JavaPairRDD<String, List<MapDocument>> blocks, DedupConfig config) {
|
||||||
JavaPairRDD<String, List<MapDocument>> blocks,
|
|
||||||
DedupConfig config) {
|
|
||||||
Map<String, LongAccumulator> accumulators =
|
Map<String, LongAccumulator> accumulators =
|
||||||
DedupUtility.constructAccumulator(config, context.sc());
|
DedupUtility.constructAccumulator(config, context.sc());
|
||||||
|
|
||||||
return blocks.flatMapToPair(
|
return blocks
|
||||||
|
.flatMapToPair(
|
||||||
(PairFlatMapFunction<Tuple2<String, List<MapDocument>>, String, String>)
|
(PairFlatMapFunction<Tuple2<String, List<MapDocument>>, String, String>)
|
||||||
it -> {
|
it -> {
|
||||||
try {
|
try {
|
||||||
final SparkReporter reporter =
|
final SparkReporter reporter = new SparkReporter(accumulators);
|
||||||
new SparkReporter(accumulators);
|
new BlockProcessor(config).processSortedBlock(it._1(), it._2(), reporter);
|
||||||
new BlockProcessor(config)
|
|
||||||
.processSortedBlock(it._1(), it._2(), reporter);
|
|
||||||
return reporter.getRelations().iterator();
|
return reporter.getRelations().iterator();
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
throw new RuntimeException(
|
throw new RuntimeException(it._2().get(0).getIdentifier(), e);
|
||||||
it._2().get(0).getIdentifier(), e);
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
(PairFunction<Tuple2<String, String>, String, Tuple2<String, String>>)
|
(PairFunction<Tuple2<String, String>, String, Tuple2<String, String>>)
|
||||||
item ->
|
item -> new Tuple2<String, Tuple2<String, String>>(item._1() + item._2(), item))
|
||||||
new Tuple2<String, Tuple2<String, String>>(
|
|
||||||
item._1() + item._2(), item))
|
|
||||||
.reduceByKey((a, b) -> a)
|
.reduceByKey((a, b) -> a)
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
(PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>)
|
(PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>) Tuple2::_2);
|
||||||
Tuple2::_2);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -53,7 +53,8 @@ public class SparkCreateConnectedComponent {
|
||||||
s -> new Tuple2<Object, String>(getHashcode(s), s));
|
s -> new Tuple2<Object, String>(getHashcode(s), s));
|
||||||
|
|
||||||
final Dataset<Relation> similarityRelations =
|
final Dataset<Relation> similarityRelations =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(DedupUtility.createSimRelPath(targetPath, entity))
|
.load(DedupUtility.createSimRelPath(targetPath, entity))
|
||||||
.as(Encoders.bean(Relation.class));
|
.as(Encoders.bean(Relation.class));
|
||||||
final RDD<Edge<String>> edgeRdd =
|
final RDD<Edge<String>> edgeRdd =
|
||||||
|
@ -62,13 +63,10 @@ public class SparkCreateConnectedComponent {
|
||||||
.map(
|
.map(
|
||||||
it ->
|
it ->
|
||||||
new Edge<>(
|
new Edge<>(
|
||||||
getHashcode(it.getSource()),
|
getHashcode(it.getSource()), getHashcode(it.getTarget()), it.getRelClass()))
|
||||||
getHashcode(it.getTarget()),
|
|
||||||
it.getRelClass()))
|
|
||||||
.rdd();
|
.rdd();
|
||||||
final JavaRDD<ConnectedComponent> cc =
|
final JavaRDD<ConnectedComponent> cc =
|
||||||
GraphProcessor.findCCs(
|
GraphProcessor.findCCs(vertexes.rdd(), edgeRdd, dedupConf.getWf().getMaxIterations())
|
||||||
vertexes.rdd(), edgeRdd, dedupConf.getWf().getMaxIterations())
|
|
||||||
.toJavaRDD();
|
.toJavaRDD();
|
||||||
final Dataset<Relation> mergeRelation =
|
final Dataset<Relation> mergeRelation =
|
||||||
spark.createDataset(
|
spark.createDataset(
|
||||||
|
@ -79,21 +77,16 @@ public class SparkCreateConnectedComponent {
|
||||||
c.getDocIds().stream()
|
c.getDocIds().stream()
|
||||||
.flatMap(
|
.flatMap(
|
||||||
id -> {
|
id -> {
|
||||||
List<Relation> tmp =
|
List<Relation> tmp = new ArrayList<>();
|
||||||
new ArrayList<>();
|
Relation r = new Relation();
|
||||||
Relation r =
|
r.setSource(c.getCcId());
|
||||||
new Relation();
|
|
||||||
r.setSource(
|
|
||||||
c.getCcId());
|
|
||||||
r.setTarget(id);
|
r.setTarget(id);
|
||||||
r.setRelClass("merges");
|
r.setRelClass("merges");
|
||||||
tmp.add(r);
|
tmp.add(r);
|
||||||
r = new Relation();
|
r = new Relation();
|
||||||
r.setTarget(
|
r.setTarget(c.getCcId());
|
||||||
c.getCcId());
|
|
||||||
r.setSource(id);
|
r.setSource(id);
|
||||||
r.setRelClass(
|
r.setRelClass("isMergedIn");
|
||||||
"isMergedIn");
|
|
||||||
tmp.add(r);
|
tmp.add(r);
|
||||||
return tmp.stream();
|
return tmp.stream();
|
||||||
})
|
})
|
||||||
|
|
|
@ -46,8 +46,7 @@ public class SparkCreateSimRels {
|
||||||
sc.textFile(inputPath + "/" + entity)
|
sc.textFile(inputPath + "/" + entity)
|
||||||
.mapToPair(
|
.mapToPair(
|
||||||
s -> {
|
s -> {
|
||||||
MapDocument d =
|
MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
|
||||||
MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
|
|
||||||
return new Tuple2<>(d.getIdentifier(), d);
|
return new Tuple2<>(d.getIdentifier(), d);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -58,8 +57,7 @@ public class SparkCreateSimRels {
|
||||||
// mapDocument, dedupConf);
|
// mapDocument, dedupConf);
|
||||||
|
|
||||||
// create relations by comparing only elements in the same group
|
// create relations by comparing only elements in the same group
|
||||||
final JavaPairRDD<String, String> dedupRels =
|
final JavaPairRDD<String, String> dedupRels = Deduper.computeRelations2(sc, blocks, dedupConf);
|
||||||
Deduper.computeRelations2(sc, blocks, dedupConf);
|
|
||||||
// final JavaPairRDD<String,String> dedupRels = Deduper.computeRelations(sc, blocks,
|
// final JavaPairRDD<String,String> dedupRels = Deduper.computeRelations(sc, blocks,
|
||||||
// dedupConf);
|
// dedupConf);
|
||||||
|
|
||||||
|
@ -73,7 +71,8 @@ public class SparkCreateSimRels {
|
||||||
return r;
|
return r;
|
||||||
});
|
});
|
||||||
|
|
||||||
spark.createDataset(isSimilarToRDD.rdd(), Encoders.bean(Relation.class))
|
spark
|
||||||
|
.createDataset(isSimilarToRDD.rdd(), Encoders.bean(Relation.class))
|
||||||
.write()
|
.write()
|
||||||
.mode("overwrite")
|
.mode("overwrite")
|
||||||
.save(DedupUtility.createSimRelPath(targetPath, entity));
|
.save(DedupUtility.createSimRelPath(targetPath, entity));
|
||||||
|
|
|
@ -41,7 +41,8 @@ public class SparkPropagateRelationsJob {
|
||||||
final String targetRelPath = parser.get("targetRelPath");
|
final String targetRelPath = parser.get("targetRelPath");
|
||||||
|
|
||||||
final Dataset<Relation> merge =
|
final Dataset<Relation> merge =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(mergeRelPath)
|
.load(mergeRelPath)
|
||||||
.as(Encoders.bean(Relation.class))
|
.as(Encoders.bean(Relation.class))
|
||||||
.where("relClass == 'merges'");
|
.where("relClass == 'merges'");
|
||||||
|
@ -57,25 +58,20 @@ public class SparkPropagateRelationsJob {
|
||||||
final Relation mergeRelation = r._2();
|
final Relation mergeRelation = r._2();
|
||||||
final Relation relation = r._1();
|
final Relation relation = r._1();
|
||||||
|
|
||||||
if (mergeRelation != null)
|
if (mergeRelation != null) relation.setSource(mergeRelation.getSource());
|
||||||
relation.setSource(mergeRelation.getSource());
|
|
||||||
return relation;
|
return relation;
|
||||||
},
|
},
|
||||||
Encoders.bean(Relation.class));
|
Encoders.bean(Relation.class));
|
||||||
|
|
||||||
final Dataset<Relation> secondJoin =
|
final Dataset<Relation> secondJoin =
|
||||||
firstJoin
|
firstJoin
|
||||||
.joinWith(
|
.joinWith(merge, merge.col("target").equalTo(firstJoin.col("target")), "left_outer")
|
||||||
merge,
|
|
||||||
merge.col("target").equalTo(firstJoin.col("target")),
|
|
||||||
"left_outer")
|
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<Tuple2<Relation, Relation>, Relation>)
|
(MapFunction<Tuple2<Relation, Relation>, Relation>)
|
||||||
r -> {
|
r -> {
|
||||||
final Relation mergeRelation = r._2();
|
final Relation mergeRelation = r._2();
|
||||||
final Relation relation = r._1();
|
final Relation relation = r._1();
|
||||||
if (mergeRelation != null)
|
if (mergeRelation != null) relation.setTarget(mergeRelation.getSource());
|
||||||
relation.setTarget(mergeRelation.getSource());
|
|
||||||
return relation;
|
return relation;
|
||||||
},
|
},
|
||||||
Encoders.bean(Relation.class));
|
Encoders.bean(Relation.class));
|
||||||
|
|
|
@ -44,16 +44,13 @@ public class SparkUpdateEntityJob {
|
||||||
final String entity = parser.get("entity");
|
final String entity = parser.get("entity");
|
||||||
final String destination = parser.get("targetPath");
|
final String destination = parser.get("targetPath");
|
||||||
|
|
||||||
final Dataset<Relation> df =
|
final Dataset<Relation> df = spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));
|
||||||
spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));
|
|
||||||
final JavaPairRDD<String, String> mergedIds =
|
final JavaPairRDD<String, String> mergedIds =
|
||||||
df.where("relClass == 'merges'")
|
df.where("relClass == 'merges'")
|
||||||
.select(df.col("target"))
|
.select(df.col("target"))
|
||||||
.distinct()
|
.distinct()
|
||||||
.toJavaRDD()
|
.toJavaRDD()
|
||||||
.mapToPair(
|
.mapToPair((PairFunction<Row, String, String>) r -> new Tuple2<>(r.getString(0), "d"));
|
||||||
(PairFunction<Row, String, String>)
|
|
||||||
r -> new Tuple2<>(r.getString(0), "d"));
|
|
||||||
final JavaRDD<String> sourceEntity = sc.textFile(entityPath);
|
final JavaRDD<String> sourceEntity = sc.textFile(entityPath);
|
||||||
|
|
||||||
final JavaRDD<String> dedupEntity = sc.textFile(dedupRecordPath);
|
final JavaRDD<String> dedupEntity = sc.textFile(dedupRecordPath);
|
||||||
|
|
|
@ -49,9 +49,7 @@ public class GraphHiveImporterJob {
|
||||||
conf.set("hive.metastore.uris", hiveMetastoreUris);
|
conf.set("hive.metastore.uris", hiveMetastoreUris);
|
||||||
|
|
||||||
runWithSparkHiveSession(
|
runWithSparkHiveSession(
|
||||||
conf,
|
conf, isSparkSessionManaged, spark -> loadGraphAsHiveDB(spark, inputPath, hiveDbName));
|
||||||
isSparkSessionManaged,
|
|
||||||
spark -> loadGraphAsHiveDB(spark, inputPath, hiveDbName));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// protected for testing
|
// protected for testing
|
||||||
|
@ -64,7 +62,8 @@ public class GraphHiveImporterJob {
|
||||||
// Read the input file and convert it into RDD of serializable object
|
// Read the input file and convert it into RDD of serializable object
|
||||||
ModelSupport.oafTypes.forEach(
|
ModelSupport.oafTypes.forEach(
|
||||||
(name, clazz) ->
|
(name, clazz) ->
|
||||||
spark.createDataset(
|
spark
|
||||||
|
.createDataset(
|
||||||
sc.textFile(inputPath + "/" + name)
|
sc.textFile(inputPath + "/" + name)
|
||||||
.map(s -> OBJECT_MAPPER.readValue(s, clazz))
|
.map(s -> OBJECT_MAPPER.readValue(s, clazz))
|
||||||
.rdd(),
|
.rdd(),
|
||||||
|
|
|
@ -67,8 +67,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
||||||
final Document doc =
|
final Document doc =
|
||||||
DocumentHelper.parseText(
|
DocumentHelper.parseText(
|
||||||
xml.replaceAll(
|
xml.replaceAll(
|
||||||
"http://datacite.org/schema/kernel-4",
|
"http://datacite.org/schema/kernel-4", "http://datacite.org/schema/kernel-3"));
|
||||||
"http://datacite.org/schema/kernel-3"));
|
|
||||||
|
|
||||||
final String type = doc.valueOf("//dr:CobjCategory/@type");
|
final String type = doc.valueOf("//dr:CobjCategory/@type");
|
||||||
final KeyValue collectedFrom =
|
final KeyValue collectedFrom =
|
||||||
|
@ -217,12 +216,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
||||||
r.setCollectedfrom(Arrays.asList(collectedFrom));
|
r.setCollectedfrom(Arrays.asList(collectedFrom));
|
||||||
r.setPid(
|
r.setPid(
|
||||||
prepareListStructProps(
|
prepareListStructProps(
|
||||||
doc,
|
doc, "//oaf:identifier", "@identifierType", "dnet:pid_types", "dnet:pid_types", info));
|
||||||
"//oaf:identifier",
|
|
||||||
"@identifierType",
|
|
||||||
"dnet:pid_types",
|
|
||||||
"dnet:pid_types",
|
|
||||||
info));
|
|
||||||
r.setDateofcollection(doc.valueOf("//dr:dateOfCollection"));
|
r.setDateofcollection(doc.valueOf("//dr:dateOfCollection"));
|
||||||
r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation"));
|
r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation"));
|
||||||
r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
||||||
|
@ -302,16 +296,14 @@ public abstract class AbstractMdRecordToOafMapper {
|
||||||
|
|
||||||
protected abstract Field<String> prepareSoftwareCodeRepositoryUrl(Document doc, DataInfo info);
|
protected abstract Field<String> prepareSoftwareCodeRepositoryUrl(Document doc, DataInfo info);
|
||||||
|
|
||||||
protected abstract List<StructuredProperty> prepareSoftwareLicenses(
|
protected abstract List<StructuredProperty> prepareSoftwareLicenses(Document doc, DataInfo info);
|
||||||
Document doc, DataInfo info);
|
|
||||||
|
|
||||||
protected abstract List<Field<String>> prepareSoftwareDocumentationUrls(
|
protected abstract List<Field<String>> prepareSoftwareDocumentationUrls(
|
||||||
Document doc, DataInfo info);
|
Document doc, DataInfo info);
|
||||||
|
|
||||||
protected abstract List<GeoLocation> prepareDatasetGeoLocations(Document doc, DataInfo info);
|
protected abstract List<GeoLocation> prepareDatasetGeoLocations(Document doc, DataInfo info);
|
||||||
|
|
||||||
protected abstract Field<String> prepareDatasetMetadataVersionNumber(
|
protected abstract Field<String> prepareDatasetMetadataVersionNumber(Document doc, DataInfo info);
|
||||||
Document doc, DataInfo info);
|
|
||||||
|
|
||||||
protected abstract Field<String> prepareDatasetLastMetadataUpdate(Document doc, DataInfo info);
|
protected abstract Field<String> prepareDatasetLastMetadataUpdate(Document doc, DataInfo info);
|
||||||
|
|
||||||
|
@ -373,9 +365,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
||||||
final Node n = (Node) o;
|
final Node n = (Node) o;
|
||||||
final String classId = n.valueOf(xpathClassId);
|
final String classId = n.valueOf(xpathClassId);
|
||||||
final String className = code2name.get(classId);
|
final String className = code2name.get(classId);
|
||||||
res.add(
|
res.add(structuredProperty(n.getText(), classId, className, schemeId, schemeName, info));
|
||||||
structuredProperty(
|
|
||||||
n.getText(), classId, className, schemeId, schemeName, info));
|
|
||||||
}
|
}
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
@ -409,8 +399,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
||||||
|
|
||||||
protected OAIProvenance prepareOAIprovenance(final Document doc) {
|
protected OAIProvenance prepareOAIprovenance(final Document doc) {
|
||||||
final Node n =
|
final Node n =
|
||||||
doc.selectSingleNode(
|
doc.selectSingleNode("//*[local-name()='provenance']/*[local-name()='originDescription']");
|
||||||
"//*[local-name()='provenance']/*[local-name()='originDescription']");
|
|
||||||
|
|
||||||
if (n == null) {
|
if (n == null) {
|
||||||
return null;
|
return null;
|
||||||
|
@ -427,8 +416,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
||||||
final String harvestDate = n.valueOf("@harvestDate");
|
final String harvestDate = n.valueOf("@harvestDate");
|
||||||
;
|
;
|
||||||
|
|
||||||
return oaiIProvenance(
|
return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
|
||||||
identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
protected DataInfo prepareDataInfo(final Document doc) {
|
protected DataInfo prepareDataInfo(final Document doc) {
|
||||||
|
@ -436,12 +424,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
||||||
|
|
||||||
if (n == null) {
|
if (n == null) {
|
||||||
return dataInfo(
|
return dataInfo(
|
||||||
false,
|
false, null, false, false, MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS, "0.9");
|
||||||
null,
|
|
||||||
false,
|
|
||||||
false,
|
|
||||||
MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS,
|
|
||||||
"0.9");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
|
final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
|
||||||
|
@ -449,8 +432,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
||||||
final String paSchemeId = n.valueOf("./oaf:provenanceaction/@schemeid");
|
final String paSchemeId = n.valueOf("./oaf:provenanceaction/@schemeid");
|
||||||
final String paSchemeName = n.valueOf("./oaf:provenanceaction/@schemename");
|
final String paSchemeName = n.valueOf("./oaf:provenanceaction/@schemename");
|
||||||
|
|
||||||
final boolean deletedbyinference =
|
final boolean deletedbyinference = Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference"));
|
||||||
Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference"));
|
|
||||||
final String inferenceprovenance = n.valueOf("./oaf:inferenceprovenance");
|
final String inferenceprovenance = n.valueOf("./oaf:inferenceprovenance");
|
||||||
final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred"));
|
final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred"));
|
||||||
final String trust = n.valueOf("./oaf:trust");
|
final String trust = n.valueOf("./oaf:trust");
|
||||||
|
|
|
@ -60,7 +60,8 @@ public class DispatchEntitiesApplication {
|
||||||
|
|
||||||
log.info("Processing entities ({}) in file: {}", type, sourcePath);
|
log.info("Processing entities ({}) in file: {}", type, sourcePath);
|
||||||
|
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.textFile(sourcePath)
|
.textFile(sourcePath)
|
||||||
.filter((FilterFunction<String>) value -> isEntityType(value, type))
|
.filter((FilterFunction<String>) value -> isEntityType(value, type))
|
||||||
.map(
|
.map(
|
||||||
|
|
|
@ -92,7 +92,8 @@ public class GenerateEntitiesApplication {
|
||||||
.flatMap(list -> list.iterator()));
|
.flatMap(list -> list.iterator()));
|
||||||
}
|
}
|
||||||
|
|
||||||
inputRdd.mapToPair(oaf -> new Tuple2<>(ModelSupport.idFn().apply(oaf), oaf))
|
inputRdd
|
||||||
|
.mapToPair(oaf -> new Tuple2<>(ModelSupport.idFn().apply(oaf), oaf))
|
||||||
.reduceByKey((o1, o2) -> merge(o1, o2))
|
.reduceByKey((o1, o2) -> merge(o1, o2))
|
||||||
.map(Tuple2::_2)
|
.map(Tuple2::_2)
|
||||||
.map(
|
.map(
|
||||||
|
|
|
@ -81,9 +81,7 @@ public class MergeClaimsApplication {
|
||||||
readFromPath(spark, rawPath, clazz)
|
readFromPath(spark, rawPath, clazz)
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<T, Tuple2<String, T>>)
|
(MapFunction<T, Tuple2<String, T>>)
|
||||||
value ->
|
value -> new Tuple2<>(ModelSupport.idFn().apply(value), value),
|
||||||
new Tuple2<>(
|
|
||||||
ModelSupport.idFn().apply(value), value),
|
|
||||||
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
|
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
|
||||||
|
|
||||||
final JavaSparkContext jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
final JavaSparkContext jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
||||||
|
@ -92,19 +90,15 @@ public class MergeClaimsApplication {
|
||||||
.getValue()
|
.getValue()
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<T, Tuple2<String, T>>)
|
(MapFunction<T, Tuple2<String, T>>)
|
||||||
value ->
|
value -> new Tuple2<>(ModelSupport.idFn().apply(value), value),
|
||||||
new Tuple2<>(
|
|
||||||
ModelSupport.idFn().apply(value), value),
|
|
||||||
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
|
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
|
||||||
|
|
||||||
raw.joinWith(claim, raw.col("_1").equalTo(claim.col("_1")), "full_outer")
|
raw.joinWith(claim, raw.col("_1").equalTo(claim.col("_1")), "full_outer")
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<Tuple2<Tuple2<String, T>, Tuple2<String, T>>, T>)
|
(MapFunction<Tuple2<Tuple2<String, T>, Tuple2<String, T>>, T>)
|
||||||
value -> {
|
value -> {
|
||||||
Optional<Tuple2<String, T>> opRaw =
|
Optional<Tuple2<String, T>> opRaw = Optional.ofNullable(value._1());
|
||||||
Optional.ofNullable(value._1());
|
Optional<Tuple2<String, T>> opClaim = Optional.ofNullable(value._2());
|
||||||
Optional<Tuple2<String, T>> opClaim =
|
|
||||||
Optional.ofNullable(value._2());
|
|
||||||
|
|
||||||
return opRaw.isPresent()
|
return opRaw.isPresent()
|
||||||
? opRaw.get()._2()
|
? opRaw.get()._2()
|
||||||
|
@ -123,14 +117,13 @@ public class MergeClaimsApplication {
|
||||||
|
|
||||||
private static <T extends Oaf> Dataset<T> readFromPath(
|
private static <T extends Oaf> Dataset<T> readFromPath(
|
||||||
SparkSession spark, String path, Class<T> clazz) {
|
SparkSession spark, String path, Class<T> clazz) {
|
||||||
return spark.read()
|
return spark
|
||||||
|
.read()
|
||||||
.textFile(path)
|
.textFile(path)
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<String, T>) value -> OBJECT_MAPPER.readValue(value, clazz),
|
(MapFunction<String, T>) value -> OBJECT_MAPPER.readValue(value, clazz),
|
||||||
Encoders.bean(clazz))
|
Encoders.bean(clazz))
|
||||||
.filter(
|
.filter((FilterFunction<T>) value -> Objects.nonNull(ModelSupport.idFn().apply(value)));
|
||||||
(FilterFunction<T>)
|
|
||||||
value -> Objects.nonNull(ModelSupport.idFn().apply(value)));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void removeOutputDir(SparkSession spark, String path) {
|
private static void removeOutputDir(SparkSession spark, String path) {
|
||||||
|
|
|
@ -90,8 +90,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
smdbe.execute("queryOrganizations.sql", smdbe::processOrganization);
|
smdbe.execute("queryOrganizations.sql", smdbe::processOrganization);
|
||||||
|
|
||||||
log.info("Processing relations ds <-> orgs ...");
|
log.info("Processing relations ds <-> orgs ...");
|
||||||
smdbe.execute(
|
smdbe.execute("queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);
|
||||||
"queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);
|
|
||||||
|
|
||||||
log.info("Processing projects <-> orgs ...");
|
log.info("Processing projects <-> orgs ...");
|
||||||
smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization);
|
smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization);
|
||||||
|
@ -117,8 +116,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
public void execute(final String sqlFile, final Function<ResultSet, List<Oaf>> producer)
|
public void execute(final String sqlFile, final Function<ResultSet, List<Oaf>> producer)
|
||||||
throws Exception {
|
throws Exception {
|
||||||
final String sql =
|
final String sql =
|
||||||
IOUtils.toString(
|
IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile));
|
||||||
getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile));
|
|
||||||
|
|
||||||
final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(oaf -> emitOaf(oaf));
|
final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(oaf -> emitOaf(oaf));
|
||||||
|
|
||||||
|
@ -145,8 +143,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
ds.setExtraInfo(new ArrayList<>()); // Values not present in the DB
|
ds.setExtraInfo(new ArrayList<>()); // Values not present in the DB
|
||||||
ds.setOaiprovenance(null); // Values not present in the DB
|
ds.setOaiprovenance(null); // Values not present in the DB
|
||||||
ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype")));
|
ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype")));
|
||||||
ds.setOpenairecompatibility(
|
ds.setOpenairecompatibility(prepareQualifierSplitting(rs.getString("openairecompatibility")));
|
||||||
prepareQualifierSplitting(rs.getString("openairecompatibility")));
|
|
||||||
ds.setOfficialname(field(rs.getString("officialname"), info));
|
ds.setOfficialname(field(rs.getString("officialname"), info));
|
||||||
ds.setEnglishname(field(rs.getString("englishname"), info));
|
ds.setEnglishname(field(rs.getString("englishname"), info));
|
||||||
ds.setWebsiteurl(field(rs.getString("websiteurl"), info));
|
ds.setWebsiteurl(field(rs.getString("websiteurl"), info));
|
||||||
|
@ -180,10 +177,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
ds.setCertificates(field(rs.getString("certificates"), info));
|
ds.setCertificates(field(rs.getString("certificates"), info));
|
||||||
ds.setPolicies(new ArrayList<>()); // The sql query returns an empty array
|
ds.setPolicies(new ArrayList<>()); // The sql query returns an empty array
|
||||||
ds.setJournal(
|
ds.setJournal(
|
||||||
prepareJournal(
|
prepareJournal(rs.getString("officialname"), rs.getString("journal"), info)); // Journal
|
||||||
rs.getString("officialname"),
|
|
||||||
rs.getString("journal"),
|
|
||||||
info)); // Journal
|
|
||||||
ds.setDataInfo(info);
|
ds.setDataInfo(info);
|
||||||
ds.setLastupdatetimestamp(lastUpdateTimestamp);
|
ds.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||||
|
|
||||||
|
@ -277,13 +271,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
o.setEcnonprofit(field(Boolean.toString(rs.getBoolean("ecnonprofit")), info));
|
o.setEcnonprofit(field(Boolean.toString(rs.getBoolean("ecnonprofit")), info));
|
||||||
o.setEcresearchorganization(
|
o.setEcresearchorganization(
|
||||||
field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info));
|
field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info));
|
||||||
o.setEchighereducation(
|
o.setEchighereducation(field(Boolean.toString(rs.getBoolean("echighereducation")), info));
|
||||||
field(Boolean.toString(rs.getBoolean("echighereducation")), info));
|
|
||||||
o.setEcinternationalorganizationeurinterests(
|
o.setEcinternationalorganizationeurinterests(
|
||||||
field(
|
field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info));
|
||||||
Boolean.toString(
|
|
||||||
rs.getBoolean("ecinternationalorganizationeurinterests")),
|
|
||||||
info));
|
|
||||||
o.setEcinternationalorganization(
|
o.setEcinternationalorganization(
|
||||||
field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info));
|
field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info));
|
||||||
o.setEcenterprise(field(Boolean.toString(rs.getBoolean("ecenterprise")), info));
|
o.setEcenterprise(field(Boolean.toString(rs.getBoolean("ecenterprise")), info));
|
||||||
|
@ -380,10 +370,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
qualifier(
|
qualifier(
|
||||||
"user:claim",
|
"user:claim", "user:claim", "dnet:provenanceActions", "dnet:provenanceActions"),
|
||||||
"user:claim",
|
|
||||||
"dnet:provenanceActions",
|
|
||||||
"dnet:provenanceActions"),
|
|
||||||
"0.9");
|
"0.9");
|
||||||
|
|
||||||
final List<KeyValue> collectedFrom =
|
final List<KeyValue> collectedFrom =
|
||||||
|
@ -416,11 +403,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
return Arrays.asList(r);
|
return Arrays.asList(r);
|
||||||
} else {
|
} else {
|
||||||
final String sourceId =
|
final String sourceId =
|
||||||
createOpenaireId(
|
createOpenaireId(rs.getString("source_type"), rs.getString("source_id"), false);
|
||||||
rs.getString("source_type"), rs.getString("source_id"), false);
|
|
||||||
final String targetId =
|
final String targetId =
|
||||||
createOpenaireId(
|
createOpenaireId(rs.getString("target_type"), rs.getString("target_id"), false);
|
||||||
rs.getString("target_type"), rs.getString("target_id"), false);
|
|
||||||
|
|
||||||
final Relation r1 = new Relation();
|
final Relation r1 = new Relation();
|
||||||
final Relation r2 = new Relation();
|
final Relation r2 = new Relation();
|
||||||
|
@ -496,9 +481,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
|
|
||||||
private List<Field<String>> prepareListFields(final Array array, final DataInfo info) {
|
private List<Field<String>> prepareListFields(final Array array, final DataInfo info) {
|
||||||
try {
|
try {
|
||||||
return array != null
|
return array != null ? listFields(info, (String[]) array.getArray()) : new ArrayList<>();
|
||||||
? listFields(info, (String[]) array.getArray())
|
|
||||||
: new ArrayList<>();
|
|
||||||
} catch (final SQLException e) {
|
} catch (final SQLException e) {
|
||||||
throw new RuntimeException("Invalid SQL array", e);
|
throw new RuntimeException("Invalid SQL array", e);
|
||||||
}
|
}
|
||||||
|
@ -544,9 +527,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
||||||
final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;
|
final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;
|
||||||
;
|
;
|
||||||
if (issn != null || eissn != null || lissn != null) {
|
if (issn != null || eissn != null || lissn != null) {
|
||||||
return journal(
|
return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info);
|
||||||
name, issn, eissn, eissn, null, null, null, null, null, null, null,
|
|
||||||
info);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -42,8 +42,7 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
|
||||||
}
|
}
|
||||||
|
|
||||||
public MigrateMongoMdstoresApplication(
|
public MigrateMongoMdstoresApplication(
|
||||||
final String hdfsPath, final String mongoBaseUrl, final String mongoDb)
|
final String hdfsPath, final String mongoBaseUrl, final String mongoDb) throws Exception {
|
||||||
throws Exception {
|
|
||||||
super(hdfsPath);
|
super(hdfsPath);
|
||||||
this.mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb);
|
this.mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb);
|
||||||
}
|
}
|
||||||
|
@ -54,12 +53,7 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
|
||||||
log.info("Found " + colls.size() + " mdstores");
|
log.info("Found " + colls.size() + " mdstores");
|
||||||
|
|
||||||
for (final Entry<String, String> entry : colls.entrySet()) {
|
for (final Entry<String, String> entry : colls.entrySet()) {
|
||||||
log.info(
|
log.info("Processing mdstore " + entry.getKey() + " (collection: " + entry.getValue() + ")");
|
||||||
"Processing mdstore "
|
|
||||||
+ entry.getKey()
|
|
||||||
+ " (collection: "
|
|
||||||
+ entry.getValue()
|
|
||||||
+ ")");
|
|
||||||
final String currentColl = entry.getValue();
|
final String currentColl = entry.getValue();
|
||||||
|
|
||||||
for (final String xml : mdstoreClient.listRecords(currentColl)) {
|
for (final String xml : mdstoreClient.listRecords(currentColl)) {
|
||||||
|
|
|
@ -110,11 +110,7 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
|
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
|
||||||
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
|
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
|
||||||
instance.setAccessright(
|
instance.setAccessright(
|
||||||
prepareQualifier(
|
prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes"));
|
||||||
doc,
|
|
||||||
"//oaf:accessrights",
|
|
||||||
"dnet:access_modes",
|
|
||||||
"dnet:access_modes"));
|
|
||||||
instance.setLicense(field(doc.valueOf("//oaf:license"), info));
|
instance.setLicense(field(doc.valueOf("//oaf:license"), info));
|
||||||
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
|
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
|
||||||
instance.setProcessingchargeamount(
|
instance.setProcessingchargeamount(
|
||||||
|
@ -133,16 +129,14 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected List<StructuredProperty> prepareRelevantDates(
|
protected List<StructuredProperty> prepareRelevantDates(final Document doc, final DataInfo info) {
|
||||||
final Document doc, final DataInfo info) {
|
|
||||||
return new ArrayList<>(); // NOT PRESENT IN OAF
|
return new ArrayList<>(); // NOT PRESENT IN OAF
|
||||||
}
|
}
|
||||||
|
|
||||||
// SOFTWARES
|
// SOFTWARES
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Qualifier prepareSoftwareProgrammingLanguage(
|
protected Qualifier prepareSoftwareProgrammingLanguage(final Document doc, final DataInfo info) {
|
||||||
final Document doc, final DataInfo info) {
|
|
||||||
return null; // NOT PRESENT IN OAF
|
return null; // NOT PRESENT IN OAF
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -166,8 +160,7 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
|
|
||||||
// DATASETS
|
// DATASETS
|
||||||
@Override
|
@Override
|
||||||
protected List<GeoLocation> prepareDatasetGeoLocations(
|
protected List<GeoLocation> prepareDatasetGeoLocations(final Document doc, final DataInfo info) {
|
||||||
final Document doc, final DataInfo info) {
|
|
||||||
return new ArrayList<>(); // NOT PRESENT IN OAF
|
return new ArrayList<>(); // NOT PRESENT IN OAF
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -58,10 +58,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
structuredProperty(
|
structuredProperty(
|
||||||
((Node) o).getText(),
|
((Node) o).getText(),
|
||||||
prepareQualifier(
|
prepareQualifier(
|
||||||
(Node) o,
|
(Node) o, "./@nameIdentifierScheme", "dnet:pid_types", "dnet:pid_types"),
|
||||||
"./@nameIdentifierScheme",
|
|
||||||
"dnet:pid_types",
|
|
||||||
"dnet:pid_types"),
|
|
||||||
info));
|
info));
|
||||||
}
|
}
|
||||||
return res;
|
return res;
|
||||||
|
@ -78,21 +75,16 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
instance.setUrl(new ArrayList<>());
|
instance.setUrl(new ArrayList<>());
|
||||||
instance.setInstancetype(
|
instance.setInstancetype(
|
||||||
prepareQualifier(
|
prepareQualifier(
|
||||||
doc,
|
doc, "//dr:CobjCategory", "dnet:publication_resource", "dnet:publication_resource"));
|
||||||
"//dr:CobjCategory",
|
|
||||||
"dnet:publication_resource",
|
|
||||||
"dnet:publication_resource"));
|
|
||||||
instance.setCollectedfrom(collectedfrom);
|
instance.setCollectedfrom(collectedfrom);
|
||||||
instance.setHostedby(hostedby);
|
instance.setHostedby(hostedby);
|
||||||
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
|
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
|
||||||
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
|
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
|
||||||
instance.setAccessright(
|
instance.setAccessright(
|
||||||
prepareQualifier(
|
prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes"));
|
||||||
doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes"));
|
|
||||||
instance.setLicense(field(doc.valueOf("//oaf:license"), info));
|
instance.setLicense(field(doc.valueOf("//oaf:license"), info));
|
||||||
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
|
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
|
||||||
instance.setProcessingchargeamount(
|
instance.setProcessingchargeamount(field(doc.valueOf("//oaf:processingchargeamount"), info));
|
||||||
field(doc.valueOf("//oaf:processingchargeamount"), info));
|
|
||||||
instance.setProcessingchargecurrency(
|
instance.setProcessingchargecurrency(
|
||||||
field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));
|
field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));
|
||||||
|
|
||||||
|
@ -119,8 +111,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected List<StructuredProperty> prepareRelevantDates(
|
protected List<StructuredProperty> prepareRelevantDates(final Document doc, final DataInfo info) {
|
||||||
final Document doc, final DataInfo info) {
|
|
||||||
final List<StructuredProperty> res = new ArrayList<>();
|
final List<StructuredProperty> res = new ArrayList<>();
|
||||||
for (final Object o : doc.selectNodes("//datacite:date")) {
|
for (final Object o : doc.selectNodes("//datacite:date")) {
|
||||||
final String dateType = ((Node) o).valueOf("@dateType");
|
final String dateType = ((Node) o).valueOf("@dateType");
|
||||||
|
@ -202,13 +193,9 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected Qualifier prepareSoftwareProgrammingLanguage(
|
protected Qualifier prepareSoftwareProgrammingLanguage(final Document doc, final DataInfo info) {
|
||||||
final Document doc, final DataInfo info) {
|
|
||||||
return prepareQualifier(
|
return prepareQualifier(
|
||||||
doc,
|
doc, "//datacite:format", "dnet:programming_languages", "dnet:programming_languages");
|
||||||
"//datacite:format",
|
|
||||||
"dnet:programming_languages",
|
|
||||||
"dnet:programming_languages");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -235,8 +222,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
// DATASETS
|
// DATASETS
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected List<GeoLocation> prepareDatasetGeoLocations(
|
protected List<GeoLocation> prepareDatasetGeoLocations(final Document doc, final DataInfo info) {
|
||||||
final Document doc, final DataInfo info) {
|
|
||||||
final List<GeoLocation> res = new ArrayList<>();
|
final List<GeoLocation> res = new ArrayList<>();
|
||||||
|
|
||||||
for (final Object o : doc.selectNodes("//datacite:geoLocation")) {
|
for (final Object o : doc.selectNodes("//datacite:geoLocation")) {
|
||||||
|
@ -293,8 +279,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
final List<Oaf> res = new ArrayList<>();
|
final List<Oaf> res = new ArrayList<>();
|
||||||
|
|
||||||
for (final Object o :
|
for (final Object o :
|
||||||
doc.selectNodes(
|
doc.selectNodes("//datacite:relatedIdentifier[@relatedIdentifierType='OPENAIRE']")) {
|
||||||
"//datacite:relatedIdentifier[@relatedIdentifierType='OPENAIRE']")) {
|
|
||||||
|
|
||||||
final String originalId = ((Node) o).getText();
|
final String originalId = ((Node) o).getText();
|
||||||
|
|
||||||
|
@ -324,22 +309,10 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
} else if (type.equals("IsPartOf")) {
|
} else if (type.equals("IsPartOf")) {
|
||||||
res.add(
|
res.add(
|
||||||
prepareOtherResultRel(
|
prepareOtherResultRel(
|
||||||
collectedFrom,
|
collectedFrom, info, lastUpdateTimestamp, docId, otherId, "part", "IsPartOf"));
|
||||||
info,
|
|
||||||
lastUpdateTimestamp,
|
|
||||||
docId,
|
|
||||||
otherId,
|
|
||||||
"part",
|
|
||||||
"IsPartOf"));
|
|
||||||
res.add(
|
res.add(
|
||||||
prepareOtherResultRel(
|
prepareOtherResultRel(
|
||||||
collectedFrom,
|
collectedFrom, info, lastUpdateTimestamp, otherId, docId, "part", "HasParts"));
|
||||||
info,
|
|
||||||
lastUpdateTimestamp,
|
|
||||||
otherId,
|
|
||||||
docId,
|
|
||||||
"part",
|
|
||||||
"HasParts"));
|
|
||||||
} else {
|
} else {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -45,8 +45,9 @@ public class AbstractMigrationApplication implements Closeable {
|
||||||
private Configuration getConf() throws IOException {
|
private Configuration getConf() throws IOException {
|
||||||
final Configuration conf = new Configuration();
|
final Configuration conf = new Configuration();
|
||||||
/*
|
/*
|
||||||
* conf.set("fs.defaultFS", hdfsNameNode); conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
|
* conf.set("fs.defaultFS", hdfsNameNode); conf.set("fs.hdfs.impl",
|
||||||
* conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName()); System.setProperty("HADOOP_USER_NAME", hdfsUser);
|
* org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()); conf.set("fs.file.impl",
|
||||||
|
* org.apache.hadoop.fs.LocalFileSystem.class.getName()); System.setProperty("HADOOP_USER_NAME", hdfsUser);
|
||||||
* System.setProperty("hadoop.home.dir", "/"); FileSystem.get(URI.create(hdfsNameNode), conf);
|
* System.setProperty("hadoop.home.dir", "/"); FileSystem.get(URI.create(hdfsNameNode), conf);
|
||||||
*/
|
*/
|
||||||
return conf;
|
return conf;
|
||||||
|
@ -64,9 +65,7 @@ public class AbstractMigrationApplication implements Closeable {
|
||||||
|
|
||||||
protected void emitOaf(final Oaf oaf) {
|
protected void emitOaf(final Oaf oaf) {
|
||||||
try {
|
try {
|
||||||
emit(
|
emit(objectMapper.writeValueAsString(oaf), oaf.getClass().getSimpleName().toLowerCase());
|
||||||
objectMapper.writeValueAsString(oaf),
|
|
||||||
oaf.getClass().getSimpleName().toLowerCase());
|
|
||||||
} catch (final Exception e) {
|
} catch (final Exception e) {
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
}
|
}
|
||||||
|
|
|
@ -71,9 +71,7 @@ public class MdstoreClient implements Closeable {
|
||||||
if (!Iterables.contains(db.listCollectionNames(), collName)) {
|
if (!Iterables.contains(db.listCollectionNames(), collName)) {
|
||||||
final String err =
|
final String err =
|
||||||
String.format(
|
String.format(
|
||||||
String.format(
|
String.format("Missing collection '%s' in database '%s'", collName, db.getName()));
|
||||||
"Missing collection '%s' in database '%s'",
|
|
||||||
collName, db.getName()));
|
|
||||||
log.warn(err);
|
log.warn(err);
|
||||||
if (abortIfMissing) {
|
if (abortIfMissing) {
|
||||||
throw new RuntimeException(err);
|
throw new RuntimeException(err);
|
||||||
|
|
|
@ -7,27 +7,25 @@ import eu.dnetlib.dhp.schema.oaf.Qualifier;
|
||||||
public class MigrationConstants {
|
public class MigrationConstants {
|
||||||
|
|
||||||
public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER =
|
public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER =
|
||||||
qualifier(
|
qualifier("publication", "publication", "dnet:result_typologies", "dnet:result_typologies");
|
||||||
"publication",
|
|
||||||
"publication",
|
|
||||||
"dnet:result_typologies",
|
|
||||||
"dnet:result_typologies");
|
|
||||||
public static final Qualifier DATASET_RESULTTYPE_QUALIFIER =
|
public static final Qualifier DATASET_RESULTTYPE_QUALIFIER =
|
||||||
qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies");
|
qualifier(
|
||||||
|
"dataset", "dataset",
|
||||||
|
"dnet:result_typologies", "dnet:result_typologies");
|
||||||
public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER =
|
public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER =
|
||||||
qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies");
|
qualifier(
|
||||||
|
"software", "software",
|
||||||
|
"dnet:result_typologies", "dnet:result_typologies");
|
||||||
public static final Qualifier OTHER_RESULTTYPE_QUALIFIER =
|
public static final Qualifier OTHER_RESULTTYPE_QUALIFIER =
|
||||||
qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies");
|
qualifier(
|
||||||
|
"other", "other",
|
||||||
|
"dnet:result_typologies", "dnet:result_typologies");
|
||||||
public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS =
|
public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS =
|
||||||
qualifier(
|
qualifier(
|
||||||
"sysimport:crosswalk:repository",
|
"sysimport:crosswalk:repository", "sysimport:crosswalk:repository",
|
||||||
"sysimport:crosswalk:repository",
|
"dnet:provenanceActions", "dnet:provenanceActions");
|
||||||
"dnet:provenanceActions",
|
|
||||||
"dnet:provenanceActions");
|
|
||||||
public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION =
|
public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION =
|
||||||
qualifier(
|
qualifier(
|
||||||
"sysimport:crosswalk:entityregistry",
|
"sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry",
|
||||||
"sysimport:crosswalk:entityregistry",
|
"dnet:provenanceActions", "dnet:provenanceActions");
|
||||||
"dnet:provenanceActions",
|
|
||||||
"dnet:provenanceActions");
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -76,8 +76,7 @@ public class OafMapperUtils {
|
||||||
final String schemename,
|
final String schemename,
|
||||||
final DataInfo dataInfo) {
|
final DataInfo dataInfo) {
|
||||||
|
|
||||||
return structuredProperty(
|
return structuredProperty(value, qualifier(classid, classname, schemeid, schemename), dataInfo);
|
||||||
value, qualifier(classid, classname, schemeid, schemename), dataInfo);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static StructuredProperty structuredProperty(
|
public static StructuredProperty structuredProperty(
|
||||||
|
|
|
@ -34,8 +34,7 @@ public class PacePerson {
|
||||||
public static Set<String> loadFromClasspath(final String classpath) {
|
public static Set<String> loadFromClasspath(final String classpath) {
|
||||||
final Set<String> h = new HashSet<>();
|
final Set<String> h = new HashSet<>();
|
||||||
try {
|
try {
|
||||||
for (final String s :
|
for (final String s : IOUtils.readLines(PacePerson.class.getResourceAsStream(classpath))) {
|
||||||
IOUtils.readLines(PacePerson.class.getResourceAsStream(classpath))) {
|
|
||||||
h.add(s);
|
h.add(s);
|
||||||
}
|
}
|
||||||
} catch (final Throwable e) {
|
} catch (final Throwable e) {
|
||||||
|
|
|
@ -79,7 +79,8 @@ public class ImportDataFromMongo {
|
||||||
.is(interpretation)
|
.is(interpretation)
|
||||||
.get();
|
.get();
|
||||||
final List<String> ids = new ArrayList<>();
|
final List<String> ids = new ArrayList<>();
|
||||||
metadata.find((Bson) query)
|
metadata
|
||||||
|
.find((Bson) query)
|
||||||
.forEach((Consumer<Document>) document -> ids.add(document.getString("mdId")));
|
.forEach((Consumer<Document>) document -> ids.add(document.getString("mdId")));
|
||||||
List<String> databaseId =
|
List<String> databaseId =
|
||||||
ids.stream()
|
ids.stream()
|
||||||
|
@ -121,8 +122,7 @@ public class ImportDataFromMongo {
|
||||||
value.set(document.getString("body"));
|
value.set(document.getString("body"));
|
||||||
|
|
||||||
if (counter.get() % 10000 == 0) {
|
if (counter.get() % 10000 == 0) {
|
||||||
System.out.println(
|
System.out.println("Added " + counter.get());
|
||||||
"Added " + counter.get());
|
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
writer.append(key, value);
|
writer.append(key, value);
|
||||||
|
@ -138,8 +138,7 @@ public class ImportDataFromMongo {
|
||||||
* Return the name of mongo collection giving an MdStore ID
|
* Return the name of mongo collection giving an MdStore ID
|
||||||
*
|
*
|
||||||
* @param mdId The id of the MDStore
|
* @param mdId The id of the MDStore
|
||||||
* @param metadataManager The collection metadataManager on mongo which contains this
|
* @param metadataManager The collection metadataManager on mongo which contains this information
|
||||||
* information
|
|
||||||
* @return
|
* @return
|
||||||
*/
|
*/
|
||||||
private static String getCurrentId(
|
private static String getCurrentId(
|
||||||
|
|
|
@ -56,23 +56,27 @@ public class SparkExtractEntitiesJob {
|
||||||
.collect(Collectors.toList());
|
.collect(Collectors.toList());
|
||||||
if (entities.stream().anyMatch("dataset"::equalsIgnoreCase)) {
|
if (entities.stream().anyMatch("dataset"::equalsIgnoreCase)) {
|
||||||
// Extract Dataset
|
// Extract Dataset
|
||||||
inputRDD.filter(SparkExtractEntitiesJob::isDataset)
|
inputRDD
|
||||||
|
.filter(SparkExtractEntitiesJob::isDataset)
|
||||||
.saveAsTextFile(targetPath + "/dataset/" + tdir, GzipCodec.class);
|
.saveAsTextFile(targetPath + "/dataset/" + tdir, GzipCodec.class);
|
||||||
}
|
}
|
||||||
if (entities.stream().anyMatch("unknown"::equalsIgnoreCase)) {
|
if (entities.stream().anyMatch("unknown"::equalsIgnoreCase)) {
|
||||||
// Extract Unknown
|
// Extract Unknown
|
||||||
inputRDD.filter(SparkExtractEntitiesJob::isUnknown)
|
inputRDD
|
||||||
|
.filter(SparkExtractEntitiesJob::isUnknown)
|
||||||
.saveAsTextFile(targetPath + "/unknown/" + tdir, GzipCodec.class);
|
.saveAsTextFile(targetPath + "/unknown/" + tdir, GzipCodec.class);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (entities.stream().anyMatch("relation"::equalsIgnoreCase)) {
|
if (entities.stream().anyMatch("relation"::equalsIgnoreCase)) {
|
||||||
// Extract Relation
|
// Extract Relation
|
||||||
inputRDD.filter(SparkExtractEntitiesJob::isRelation)
|
inputRDD
|
||||||
|
.filter(SparkExtractEntitiesJob::isRelation)
|
||||||
.saveAsTextFile(targetPath + "/relation/" + tdir, GzipCodec.class);
|
.saveAsTextFile(targetPath + "/relation/" + tdir, GzipCodec.class);
|
||||||
}
|
}
|
||||||
if (entities.stream().anyMatch("publication"::equalsIgnoreCase)) {
|
if (entities.stream().anyMatch("publication"::equalsIgnoreCase)) {
|
||||||
// Extract Relation
|
// Extract Relation
|
||||||
inputRDD.filter(SparkExtractEntitiesJob::isPublication)
|
inputRDD
|
||||||
|
.filter(SparkExtractEntitiesJob::isPublication)
|
||||||
.saveAsTextFile(targetPath + "/publication/" + tdir, GzipCodec.class);
|
.saveAsTextFile(targetPath + "/publication/" + tdir, GzipCodec.class);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -41,8 +41,7 @@ public class SparkSXGeneratePidSimlarity {
|
||||||
.filter(
|
.filter(
|
||||||
t ->
|
t ->
|
||||||
!StringUtils.substringAfter(t._1(), "|")
|
!StringUtils.substringAfter(t._1(), "|")
|
||||||
.equalsIgnoreCase(
|
.equalsIgnoreCase(StringUtils.substringAfter(t._2(), "::")))
|
||||||
StringUtils.substringAfter(t._2(), "::")))
|
|
||||||
.distinct();
|
.distinct();
|
||||||
|
|
||||||
final JavaPairRDD<String, String> publicationSimRel =
|
final JavaPairRDD<String, String> publicationSimRel =
|
||||||
|
@ -56,8 +55,7 @@ public class SparkSXGeneratePidSimlarity {
|
||||||
.filter(
|
.filter(
|
||||||
t ->
|
t ->
|
||||||
!StringUtils.substringAfter(t._1(), "|")
|
!StringUtils.substringAfter(t._1(), "|")
|
||||||
.equalsIgnoreCase(
|
.equalsIgnoreCase(StringUtils.substringAfter(t._2(), "::")))
|
||||||
StringUtils.substringAfter(t._2(), "::")))
|
|
||||||
.distinct();
|
.distinct();
|
||||||
|
|
||||||
JavaRDD<DLIRelation> simRel =
|
JavaRDD<DLIRelation> simRel =
|
||||||
|
@ -71,7 +69,8 @@ public class SparkSXGeneratePidSimlarity {
|
||||||
r.setRelType("similar");
|
r.setRelType("similar");
|
||||||
return r;
|
return r;
|
||||||
});
|
});
|
||||||
spark.createDataset(simRel.rdd(), Encoders.bean(DLIRelation.class))
|
spark
|
||||||
|
.createDataset(simRel.rdd(), Encoders.bean(DLIRelation.class))
|
||||||
.distinct()
|
.distinct()
|
||||||
.write()
|
.write()
|
||||||
.mode(SaveMode.Overwrite)
|
.mode(SaveMode.Overwrite)
|
||||||
|
|
|
@ -65,9 +65,7 @@ public class SparkScholexplorerCreateRawGraphJob {
|
||||||
SparkSession.builder()
|
SparkSession.builder()
|
||||||
.config(
|
.config(
|
||||||
new SparkConf()
|
new SparkConf()
|
||||||
.set(
|
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer"))
|
||||||
"spark.serializer",
|
|
||||||
"org.apache.spark.serializer.KryoSerializer"))
|
|
||||||
.appName(SparkScholexplorerCreateRawGraphJob.class.getSimpleName())
|
.appName(SparkScholexplorerCreateRawGraphJob.class.getSimpleName())
|
||||||
.master(parser.get("master"))
|
.master(parser.get("master"))
|
||||||
.getOrCreate();
|
.getOrCreate();
|
||||||
|
@ -89,17 +87,14 @@ public class SparkScholexplorerCreateRawGraphJob {
|
||||||
}
|
}
|
||||||
switch (entity) {
|
switch (entity) {
|
||||||
case "dataset":
|
case "dataset":
|
||||||
union.mapToPair(
|
union
|
||||||
|
.mapToPair(
|
||||||
(PairFunction<String, String, DLIDataset>)
|
(PairFunction<String, String, DLIDataset>)
|
||||||
f -> {
|
f -> {
|
||||||
final String id = getJPathString(IDJSONPATH, f);
|
final String id = getJPathString(IDJSONPATH, f);
|
||||||
ObjectMapper mapper = new ObjectMapper();
|
ObjectMapper mapper = new ObjectMapper();
|
||||||
mapper.configure(
|
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||||
DeserializationFeature
|
return new Tuple2<>(id, mapper.readValue(f, DLIDataset.class));
|
||||||
.FAIL_ON_UNKNOWN_PROPERTIES,
|
|
||||||
false);
|
|
||||||
return new Tuple2<>(
|
|
||||||
id, mapper.readValue(f, DLIDataset.class));
|
|
||||||
})
|
})
|
||||||
.reduceByKey(
|
.reduceByKey(
|
||||||
(a, b) -> {
|
(a, b) -> {
|
||||||
|
@ -114,17 +109,14 @@ public class SparkScholexplorerCreateRawGraphJob {
|
||||||
.saveAsTextFile(targetPath, GzipCodec.class);
|
.saveAsTextFile(targetPath, GzipCodec.class);
|
||||||
break;
|
break;
|
||||||
case "publication":
|
case "publication":
|
||||||
union.mapToPair(
|
union
|
||||||
|
.mapToPair(
|
||||||
(PairFunction<String, String, DLIPublication>)
|
(PairFunction<String, String, DLIPublication>)
|
||||||
f -> {
|
f -> {
|
||||||
final String id = getJPathString(IDJSONPATH, f);
|
final String id = getJPathString(IDJSONPATH, f);
|
||||||
ObjectMapper mapper = new ObjectMapper();
|
ObjectMapper mapper = new ObjectMapper();
|
||||||
mapper.configure(
|
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||||
DeserializationFeature
|
return new Tuple2<>(id, mapper.readValue(f, DLIPublication.class));
|
||||||
.FAIL_ON_UNKNOWN_PROPERTIES,
|
|
||||||
false);
|
|
||||||
return new Tuple2<>(
|
|
||||||
id, mapper.readValue(f, DLIPublication.class));
|
|
||||||
})
|
})
|
||||||
.reduceByKey(
|
.reduceByKey(
|
||||||
(a, b) -> {
|
(a, b) -> {
|
||||||
|
@ -139,17 +131,14 @@ public class SparkScholexplorerCreateRawGraphJob {
|
||||||
.saveAsTextFile(targetPath, GzipCodec.class);
|
.saveAsTextFile(targetPath, GzipCodec.class);
|
||||||
break;
|
break;
|
||||||
case "unknown":
|
case "unknown":
|
||||||
union.mapToPair(
|
union
|
||||||
|
.mapToPair(
|
||||||
(PairFunction<String, String, DLIUnknown>)
|
(PairFunction<String, String, DLIUnknown>)
|
||||||
f -> {
|
f -> {
|
||||||
final String id = getJPathString(IDJSONPATH, f);
|
final String id = getJPathString(IDJSONPATH, f);
|
||||||
ObjectMapper mapper = new ObjectMapper();
|
ObjectMapper mapper = new ObjectMapper();
|
||||||
mapper.configure(
|
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||||
DeserializationFeature
|
return new Tuple2<>(id, mapper.readValue(f, DLIUnknown.class));
|
||||||
.FAIL_ON_UNKNOWN_PROPERTIES,
|
|
||||||
false);
|
|
||||||
return new Tuple2<>(
|
|
||||||
id, mapper.readValue(f, DLIUnknown.class));
|
|
||||||
})
|
})
|
||||||
.reduceByKey(
|
.reduceByKey(
|
||||||
(a, b) -> {
|
(a, b) -> {
|
||||||
|
@ -165,25 +154,18 @@ public class SparkScholexplorerCreateRawGraphJob {
|
||||||
break;
|
break;
|
||||||
case "relation":
|
case "relation":
|
||||||
SparkSXGeneratePidSimlarity.generateDataFrame(
|
SparkSXGeneratePidSimlarity.generateDataFrame(
|
||||||
spark,
|
spark, sc, inputPath.replace("/relation", ""), targetPath.replace("/relation", ""));
|
||||||
sc,
|
|
||||||
inputPath.replace("/relation", ""),
|
|
||||||
targetPath.replace("/relation", ""));
|
|
||||||
RDD<DLIRelation> rdd =
|
RDD<DLIRelation> rdd =
|
||||||
union.mapToPair(
|
union
|
||||||
|
.mapToPair(
|
||||||
(PairFunction<String, String, DLIRelation>)
|
(PairFunction<String, String, DLIRelation>)
|
||||||
f -> {
|
f -> {
|
||||||
final String source =
|
final String source = getJPathString(SOURCEJSONPATH, f);
|
||||||
getJPathString(SOURCEJSONPATH, f);
|
final String target = getJPathString(TARGETJSONPATH, f);
|
||||||
final String target =
|
final String reltype = getJPathString(RELJSONPATH, f);
|
||||||
getJPathString(TARGETJSONPATH, f);
|
|
||||||
final String reltype =
|
|
||||||
getJPathString(RELJSONPATH, f);
|
|
||||||
ObjectMapper mapper = new ObjectMapper();
|
ObjectMapper mapper = new ObjectMapper();
|
||||||
mapper.configure(
|
mapper.configure(
|
||||||
DeserializationFeature
|
DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||||
.FAIL_ON_UNKNOWN_PROPERTIES,
|
|
||||||
false);
|
|
||||||
return new Tuple2<>(
|
return new Tuple2<>(
|
||||||
DHPUtils.md5(
|
DHPUtils.md5(
|
||||||
String.format(
|
String.format(
|
||||||
|
@ -201,17 +183,17 @@ public class SparkScholexplorerCreateRawGraphJob {
|
||||||
.map(Tuple2::_2)
|
.map(Tuple2::_2)
|
||||||
.rdd();
|
.rdd();
|
||||||
|
|
||||||
spark.createDataset(rdd, Encoders.bean(DLIRelation.class))
|
spark
|
||||||
|
.createDataset(rdd, Encoders.bean(DLIRelation.class))
|
||||||
.write()
|
.write()
|
||||||
.mode(SaveMode.Overwrite)
|
.mode(SaveMode.Overwrite)
|
||||||
.save(targetPath);
|
.save(targetPath);
|
||||||
Dataset<Relation> rel_ds =
|
Dataset<Relation> rel_ds = spark.read().load(targetPath).as(Encoders.bean(Relation.class));
|
||||||
spark.read().load(targetPath).as(Encoders.bean(Relation.class));
|
|
||||||
|
|
||||||
System.out.println(
|
System.out.println("LOADING PATH :" + targetPath.replace("/relation", "") + "/pid_simRel");
|
||||||
"LOADING PATH :" + targetPath.replace("/relation", "") + "/pid_simRel");
|
|
||||||
Dataset<Relation> sim_ds =
|
Dataset<Relation> sim_ds =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(targetPath.replace("/relation", "") + "/pid_simRel")
|
.load(targetPath.replace("/relation", "") + "/pid_simRel")
|
||||||
.as(Encoders.bean(Relation.class));
|
.as(Encoders.bean(Relation.class));
|
||||||
|
|
||||||
|
@ -219,24 +201,18 @@ public class SparkScholexplorerCreateRawGraphJob {
|
||||||
sim_ds.map(
|
sim_ds.map(
|
||||||
(MapFunction<Relation, Relation>)
|
(MapFunction<Relation, Relation>)
|
||||||
relation -> {
|
relation -> {
|
||||||
final String type =
|
final String type = StringUtils.substringBefore(relation.getSource(), "|");
|
||||||
StringUtils.substringBefore(
|
|
||||||
relation.getSource(), "|");
|
|
||||||
relation.setTarget(
|
relation.setTarget(
|
||||||
String.format(
|
String.format(
|
||||||
"%s|%s",
|
"%s|%s",
|
||||||
type,
|
type, StringUtils.substringAfter(relation.getTarget(), "::")));
|
||||||
StringUtils.substringAfter(
|
|
||||||
relation.getTarget(), "::")));
|
|
||||||
return relation;
|
return relation;
|
||||||
},
|
},
|
||||||
Encoders.bean(Relation.class));
|
Encoders.bean(Relation.class));
|
||||||
|
|
||||||
final Dataset<Relation> firstJoin =
|
final Dataset<Relation> firstJoin =
|
||||||
rel_ds.joinWith(
|
rel_ds
|
||||||
ids,
|
.joinWith(ids, ids.col("target").equalTo(rel_ds.col("source")), "left_outer")
|
||||||
ids.col("target").equalTo(rel_ds.col("source")),
|
|
||||||
"left_outer")
|
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<Tuple2<Relation, Relation>, Relation>)
|
(MapFunction<Tuple2<Relation, Relation>, Relation>)
|
||||||
s -> {
|
s -> {
|
||||||
|
@ -249,10 +225,7 @@ public class SparkScholexplorerCreateRawGraphJob {
|
||||||
|
|
||||||
Dataset<Relation> secondJoin =
|
Dataset<Relation> secondJoin =
|
||||||
firstJoin
|
firstJoin
|
||||||
.joinWith(
|
.joinWith(ids, ids.col("target").equalTo(firstJoin.col("target")), "left_outer")
|
||||||
ids,
|
|
||||||
ids.col("target").equalTo(firstJoin.col("target")),
|
|
||||||
"left_outer")
|
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<Tuple2<Relation, Relation>, Relation>)
|
(MapFunction<Tuple2<Relation, Relation>, Relation>)
|
||||||
s -> {
|
s -> {
|
||||||
|
|
|
@ -49,16 +49,13 @@ public class SparkScholexplorerGraphImporter {
|
||||||
record -> {
|
record -> {
|
||||||
switch (parser.get("entity")) {
|
switch (parser.get("entity")) {
|
||||||
case "dataset":
|
case "dataset":
|
||||||
final DatasetScholexplorerParser d =
|
final DatasetScholexplorerParser d = new DatasetScholexplorerParser();
|
||||||
new DatasetScholexplorerParser();
|
|
||||||
return d.parseObject(record, relationMapper).iterator();
|
return d.parseObject(record, relationMapper).iterator();
|
||||||
case "publication":
|
case "publication":
|
||||||
final PublicationScholexplorerParser p =
|
final PublicationScholexplorerParser p = new PublicationScholexplorerParser();
|
||||||
new PublicationScholexplorerParser();
|
|
||||||
return p.parseObject(record, relationMapper).iterator();
|
return p.parseObject(record, relationMapper).iterator();
|
||||||
default:
|
default:
|
||||||
throw new IllegalArgumentException(
|
throw new IllegalArgumentException("wrong values of entities");
|
||||||
"wrong values of entities");
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.map(
|
.map(
|
||||||
|
|
|
@ -114,8 +114,7 @@ public abstract class AbstractScholexplorerParser {
|
||||||
|
|
||||||
return type
|
return type
|
||||||
+ DHPUtils.md5(
|
+ DHPUtils.md5(
|
||||||
String.format(
|
String.format("%s::%s", pid.toLowerCase().trim(), pidType.toLowerCase().trim()));
|
||||||
"%s::%s", pid.toLowerCase().trim(), pidType.toLowerCase().trim()));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
protected DLIUnknown createUnknownObject(
|
protected DLIUnknown createUnknownObject(
|
||||||
|
@ -161,22 +160,15 @@ public abstract class AbstractScholexplorerParser {
|
||||||
DLIRelation r = new DLIRelation();
|
DLIRelation r = new DLIRelation();
|
||||||
r.setSource(parsedObject.getId());
|
r.setSource(parsedObject.getId());
|
||||||
final String relatedPid = n.getTextValue();
|
final String relatedPid = n.getTextValue();
|
||||||
final String relatedPidType =
|
final String relatedPidType = n.getAttributes().get("relatedIdentifierType");
|
||||||
n.getAttributes().get("relatedIdentifierType");
|
|
||||||
final String relatedType =
|
final String relatedType =
|
||||||
n.getAttributes()
|
n.getAttributes().getOrDefault("entityType", "unknown");
|
||||||
.getOrDefault("entityType", "unknown");
|
String relationSemantic = n.getAttributes().get("relationType");
|
||||||
String relationSemantic =
|
|
||||||
n.getAttributes().get("relationType");
|
|
||||||
String inverseRelation;
|
String inverseRelation;
|
||||||
final String targetId =
|
final String targetId = generateId(relatedPid, relatedPidType, relatedType);
|
||||||
generateId(relatedPid, relatedPidType, relatedType);
|
|
||||||
r.setDateOfCollection(dateOfCollection);
|
r.setDateOfCollection(dateOfCollection);
|
||||||
if (relationMapper.containsKey(
|
if (relationMapper.containsKey(relationSemantic.toLowerCase())) {
|
||||||
relationSemantic.toLowerCase())) {
|
RelInfo relInfo = relationMapper.get(relationSemantic.toLowerCase());
|
||||||
RelInfo relInfo =
|
|
||||||
relationMapper.get(
|
|
||||||
relationSemantic.toLowerCase());
|
|
||||||
relationSemantic = relInfo.getOriginal();
|
relationSemantic = relInfo.getOriginal();
|
||||||
inverseRelation = relInfo.getInverse();
|
inverseRelation = relInfo.getInverse();
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -37,8 +37,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
|
||||||
|
|
||||||
parsedObject.setOriginalId(
|
parsedObject.setOriginalId(
|
||||||
Collections.singletonList(
|
Collections.singletonList(
|
||||||
VtdUtilityParser.getSingleValue(
|
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='recordIdentifier']")));
|
||||||
ap, vn, "//*[local-name()='recordIdentifier']")));
|
|
||||||
|
|
||||||
parsedObject.setOriginalObjIdentifier(
|
parsedObject.setOriginalObjIdentifier(
|
||||||
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='objIdentifier']"));
|
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='objIdentifier']"));
|
||||||
|
@ -96,8 +95,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
|
||||||
provenance.setId(it.getAttributes().get("id"));
|
provenance.setId(it.getAttributes().get("id"));
|
||||||
provenance.setName(it.getAttributes().get("name"));
|
provenance.setName(it.getAttributes().get("name"));
|
||||||
provenance.setCollectionMode(provisionMode);
|
provenance.setCollectionMode(provisionMode);
|
||||||
provenance.setCompletionStatus(
|
provenance.setCompletionStatus(it.getAttributes().get("completionStatus"));
|
||||||
it.getAttributes().get("completionStatus"));
|
|
||||||
provenances.add(provenance);
|
provenances.add(provenance);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -109,8 +107,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
|
||||||
provenance.setId(it.getAttributes().get("id"));
|
provenance.setId(it.getAttributes().get("id"));
|
||||||
provenance.setName(it.getAttributes().get("name"));
|
provenance.setName(it.getAttributes().get("name"));
|
||||||
provenance.setCollectionMode("resolved");
|
provenance.setCollectionMode("resolved");
|
||||||
provenance.setCompletionStatus(
|
provenance.setCompletionStatus(it.getAttributes().get("completionStatus"));
|
||||||
it.getAttributes().get("completionStatus"));
|
|
||||||
provenances.add(provenance);
|
provenances.add(provenance);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -127,8 +124,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
|
||||||
})
|
})
|
||||||
.collect(Collectors.toList()));
|
.collect(Collectors.toList()));
|
||||||
parsedObject.setCompletionStatus(
|
parsedObject.setCompletionStatus(
|
||||||
VtdUtilityParser.getSingleValue(
|
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='completionStatus']"));
|
||||||
ap, vn, "//*[local-name()='completionStatus']"));
|
|
||||||
|
|
||||||
final List<Node> identifierType =
|
final List<Node> identifierType =
|
||||||
VtdUtilityParser.getTextValuesWithAttributes(
|
VtdUtilityParser.getTextValuesWithAttributes(
|
||||||
|
@ -143,14 +139,10 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
|
||||||
parsedObject.setPid(Collections.singletonList(currentPid));
|
parsedObject.setPid(Collections.singletonList(currentPid));
|
||||||
|
|
||||||
final String sourceId =
|
final String sourceId =
|
||||||
generateId(
|
generateId(currentPid.getValue(), currentPid.getQualifier().getClassid(), "dataset");
|
||||||
currentPid.getValue(),
|
|
||||||
currentPid.getQualifier().getClassid(),
|
|
||||||
"dataset");
|
|
||||||
parsedObject.setId(sourceId);
|
parsedObject.setId(sourceId);
|
||||||
|
|
||||||
List<String> descs =
|
List<String> descs = VtdUtilityParser.getTextValue(ap, vn, "//*[local-name()='description']");
|
||||||
VtdUtilityParser.getTextValue(ap, vn, "//*[local-name()='description']");
|
|
||||||
if (descs != null && descs.size() > 0)
|
if (descs != null && descs.size() > 0)
|
||||||
parsedObject.setDescription(
|
parsedObject.setDescription(
|
||||||
descs.stream()
|
descs.stream()
|
||||||
|
@ -169,10 +161,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
|
||||||
vn,
|
vn,
|
||||||
"//*[local-name()='relatedIdentifier']",
|
"//*[local-name()='relatedIdentifier']",
|
||||||
Arrays.asList(
|
Arrays.asList(
|
||||||
"relatedIdentifierType",
|
"relatedIdentifierType", "relationType", "entityType", "inverseRelationType"));
|
||||||
"relationType",
|
|
||||||
"entityType",
|
|
||||||
"inverseRelationType"));
|
|
||||||
|
|
||||||
generateRelations(
|
generateRelations(
|
||||||
relationMapper, parsedObject, result, di, dateOfCollection, relatedIdentifiers);
|
relationMapper, parsedObject, result, di, dateOfCollection, relatedIdentifiers);
|
||||||
|
@ -187,9 +176,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
|
||||||
.map(
|
.map(
|
||||||
it -> {
|
it -> {
|
||||||
final Instance i = new Instance();
|
final Instance i = new Instance();
|
||||||
i.setUrl(
|
i.setUrl(Collections.singletonList(currentPid.getValue()));
|
||||||
Collections.singletonList(
|
|
||||||
currentPid.getValue()));
|
|
||||||
KeyValue h = new KeyValue();
|
KeyValue h = new KeyValue();
|
||||||
i.setHostedby(h);
|
i.setHostedby(h);
|
||||||
h.setKey(it.getAttributes().get("id"));
|
h.setKey(it.getAttributes().get("id"));
|
||||||
|
|
|
@ -43,8 +43,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
|
||||||
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='resolvedDate']");
|
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='resolvedDate']");
|
||||||
parsedObject.setOriginalId(
|
parsedObject.setOriginalId(
|
||||||
Collections.singletonList(
|
Collections.singletonList(
|
||||||
VtdUtilityParser.getSingleValue(
|
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='recordIdentifier']")));
|
||||||
ap, vn, "//*[local-name()='recordIdentifier']")));
|
|
||||||
|
|
||||||
if (StringUtils.isNotBlank(resolvedDate)) {
|
if (StringUtils.isNotBlank(resolvedDate)) {
|
||||||
StructuredProperty currentDate = new StructuredProperty();
|
StructuredProperty currentDate = new StructuredProperty();
|
||||||
|
@ -67,10 +66,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
|
||||||
inferPid(currentPid);
|
inferPid(currentPid);
|
||||||
parsedObject.setPid(Collections.singletonList(currentPid));
|
parsedObject.setPid(Collections.singletonList(currentPid));
|
||||||
final String sourceId =
|
final String sourceId =
|
||||||
generateId(
|
generateId(currentPid.getValue(), currentPid.getQualifier().getClassid(), "publication");
|
||||||
currentPid.getValue(),
|
|
||||||
currentPid.getQualifier().getClassid(),
|
|
||||||
"publication");
|
|
||||||
parsedObject.setId(sourceId);
|
parsedObject.setId(sourceId);
|
||||||
|
|
||||||
parsedObject.setOriginalObjIdentifier(
|
parsedObject.setOriginalObjIdentifier(
|
||||||
|
@ -107,8 +103,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
|
||||||
provenance.setId(it.getAttributes().get("id"));
|
provenance.setId(it.getAttributes().get("id"));
|
||||||
provenance.setName(it.getAttributes().get("name"));
|
provenance.setName(it.getAttributes().get("name"));
|
||||||
provenance.setCollectionMode(provisionMode);
|
provenance.setCollectionMode(provisionMode);
|
||||||
provenance.setCompletionStatus(
|
provenance.setCompletionStatus(it.getAttributes().get("completionStatus"));
|
||||||
it.getAttributes().get("completionStatus"));
|
|
||||||
provenances.add(provenance);
|
provenances.add(provenance);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -120,16 +115,14 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
|
||||||
provenance.setId(it.getAttributes().get("id"));
|
provenance.setId(it.getAttributes().get("id"));
|
||||||
provenance.setName(it.getAttributes().get("name"));
|
provenance.setName(it.getAttributes().get("name"));
|
||||||
provenance.setCollectionMode("resolved");
|
provenance.setCollectionMode("resolved");
|
||||||
provenance.setCompletionStatus(
|
provenance.setCompletionStatus(it.getAttributes().get("completionStatus"));
|
||||||
it.getAttributes().get("completionStatus"));
|
|
||||||
provenances.add(provenance);
|
provenances.add(provenance);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
parsedObject.setDlicollectedfrom(provenances);
|
parsedObject.setDlicollectedfrom(provenances);
|
||||||
parsedObject.setCompletionStatus(
|
parsedObject.setCompletionStatus(
|
||||||
VtdUtilityParser.getSingleValue(
|
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='completionStatus']"));
|
||||||
ap, vn, "//*[local-name()='completionStatus']"));
|
|
||||||
|
|
||||||
parsedObject.setCollectedfrom(
|
parsedObject.setCollectedfrom(
|
||||||
parsedObject.getDlicollectedfrom().stream()
|
parsedObject.getDlicollectedfrom().stream()
|
||||||
|
@ -148,10 +141,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
|
||||||
vn,
|
vn,
|
||||||
"//*[local-name()='relatedIdentifier']",
|
"//*[local-name()='relatedIdentifier']",
|
||||||
Arrays.asList(
|
Arrays.asList(
|
||||||
"relatedIdentifierType",
|
"relatedIdentifierType", "relationType", "entityType", "inverseRelationType"));
|
||||||
"relationType",
|
|
||||||
"entityType",
|
|
||||||
"inverseRelationType"));
|
|
||||||
generateRelations(
|
generateRelations(
|
||||||
relationMapper, parsedObject, result, di, dateOfCollection, relatedIdentifiers);
|
relationMapper, parsedObject, result, di, dateOfCollection, relatedIdentifiers);
|
||||||
|
|
||||||
|
@ -165,9 +155,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
|
||||||
.map(
|
.map(
|
||||||
it -> {
|
it -> {
|
||||||
final Instance i = new Instance();
|
final Instance i = new Instance();
|
||||||
i.setUrl(
|
i.setUrl(Collections.singletonList(currentPid.getValue()));
|
||||||
Collections.singletonList(
|
|
||||||
currentPid.getValue()));
|
|
||||||
KeyValue h = new KeyValue();
|
KeyValue h = new KeyValue();
|
||||||
i.setHostedby(h);
|
i.setHostedby(h);
|
||||||
h.setKey(it.getAttributes().get("id"));
|
h.setKey(it.getAttributes().get("id"));
|
||||||
|
@ -231,10 +219,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
|
||||||
List<StructuredProperty> subjects =
|
List<StructuredProperty> subjects =
|
||||||
extractSubject(
|
extractSubject(
|
||||||
VtdUtilityParser.getTextValuesWithAttributes(
|
VtdUtilityParser.getTextValuesWithAttributes(
|
||||||
ap,
|
ap, vn, "//*[local-name()='subject']", Collections.singletonList("scheme")));
|
||||||
vn,
|
|
||||||
"//*[local-name()='subject']",
|
|
||||||
Collections.singletonList("scheme")));
|
|
||||||
parsedObject.setSubject(subjects);
|
parsedObject.setSubject(subjects);
|
||||||
|
|
||||||
parsedObject.setDataInfo(di);
|
parsedObject.setDataInfo(di);
|
||||||
|
|
|
@ -55,11 +55,9 @@ public class MigrateDbEntitiesApplicationTest {
|
||||||
assertEquals(ds.getEnglishname().getValue(), getValueAsString("englishname", fields));
|
assertEquals(ds.getEnglishname().getValue(), getValueAsString("englishname", fields));
|
||||||
assertEquals(ds.getContactemail().getValue(), getValueAsString("contactemail", fields));
|
assertEquals(ds.getContactemail().getValue(), getValueAsString("contactemail", fields));
|
||||||
assertEquals(ds.getWebsiteurl().getValue(), getValueAsString("websiteurl", fields));
|
assertEquals(ds.getWebsiteurl().getValue(), getValueAsString("websiteurl", fields));
|
||||||
|
assertEquals(ds.getNamespaceprefix().getValue(), getValueAsString("namespaceprefix", fields));
|
||||||
assertEquals(
|
assertEquals(
|
||||||
ds.getNamespaceprefix().getValue(), getValueAsString("namespaceprefix", fields));
|
ds.getCollectedfrom().get(0).getValue(), getValueAsString("collectedfromname", fields));
|
||||||
assertEquals(
|
|
||||||
ds.getCollectedfrom().get(0).getValue(),
|
|
||||||
getValueAsString("collectedfromname", fields));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -76,8 +74,7 @@ public class MigrateDbEntitiesApplicationTest {
|
||||||
assertEquals(p.getAcronym().getValue(), getValueAsString("acronym", fields));
|
assertEquals(p.getAcronym().getValue(), getValueAsString("acronym", fields));
|
||||||
assertEquals(p.getTitle().getValue(), getValueAsString("title", fields));
|
assertEquals(p.getTitle().getValue(), getValueAsString("title", fields));
|
||||||
assertEquals(
|
assertEquals(
|
||||||
p.getCollectedfrom().get(0).getValue(),
|
p.getCollectedfrom().get(0).getValue(), getValueAsString("collectedfromname", fields));
|
||||||
getValueAsString("collectedfromname", fields));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -96,18 +93,14 @@ public class MigrateDbEntitiesApplicationTest {
|
||||||
assertEquals(o.getLegalshortname().getValue(), getValueAsString("legalshortname", fields));
|
assertEquals(o.getLegalshortname().getValue(), getValueAsString("legalshortname", fields));
|
||||||
assertEquals(o.getLegalname().getValue(), getValueAsString("legalname", fields));
|
assertEquals(o.getLegalname().getValue(), getValueAsString("legalname", fields));
|
||||||
assertEquals(o.getWebsiteurl().getValue(), getValueAsString("websiteurl", fields));
|
assertEquals(o.getWebsiteurl().getValue(), getValueAsString("websiteurl", fields));
|
||||||
assertEquals(
|
assertEquals(o.getCountry().getClassid(), getValueAsString("country", fields).split("@@@")[0]);
|
||||||
o.getCountry().getClassid(), getValueAsString("country", fields).split("@@@")[0]);
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
o.getCountry().getClassname(), getValueAsString("country", fields).split("@@@")[1]);
|
o.getCountry().getClassname(), getValueAsString("country", fields).split("@@@")[1]);
|
||||||
|
assertEquals(o.getCountry().getSchemeid(), getValueAsString("country", fields).split("@@@")[2]);
|
||||||
assertEquals(
|
assertEquals(
|
||||||
o.getCountry().getSchemeid(), getValueAsString("country", fields).split("@@@")[2]);
|
o.getCountry().getSchemename(), getValueAsString("country", fields).split("@@@")[3]);
|
||||||
assertEquals(
|
assertEquals(
|
||||||
o.getCountry().getSchemename(),
|
o.getCollectedfrom().get(0).getValue(), getValueAsString("collectedfromname", fields));
|
||||||
getValueAsString("country", fields).split("@@@")[3]);
|
|
||||||
assertEquals(
|
|
||||||
o.getCollectedfrom().get(0).getValue(),
|
|
||||||
getValueAsString("collectedfromname", fields));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -201,8 +194,7 @@ public class MigrateDbEntitiesApplicationTest {
|
||||||
private List<TypedField> prepareMocks(final String jsonFile) throws IOException, SQLException {
|
private List<TypedField> prepareMocks(final String jsonFile) throws IOException, SQLException {
|
||||||
final String json = IOUtils.toString(getClass().getResourceAsStream(jsonFile));
|
final String json = IOUtils.toString(getClass().getResourceAsStream(jsonFile));
|
||||||
final ObjectMapper mapper = new ObjectMapper();
|
final ObjectMapper mapper = new ObjectMapper();
|
||||||
final List<TypedField> list =
|
final List<TypedField> list = mapper.readValue(json, new TypeReference<List<TypedField>>() {});
|
||||||
mapper.readValue(json, new TypeReference<List<TypedField>>() {});
|
|
||||||
|
|
||||||
for (final TypedField tf : list) {
|
for (final TypedField tf : list) {
|
||||||
if (tf.getValue() == null) {
|
if (tf.getValue() == null) {
|
||||||
|
@ -263,8 +255,7 @@ public class MigrateDbEntitiesApplicationTest {
|
||||||
break;
|
break;
|
||||||
case "string":
|
case "string":
|
||||||
default:
|
default:
|
||||||
Mockito.when(rs.getString(tf.getField()))
|
Mockito.when(rs.getString(tf.getField())).thenReturn(tf.getValue().toString());
|
||||||
.thenReturn(tf.getValue().toString());
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -37,12 +37,8 @@ public class SparkGenerateScholix {
|
||||||
|
|
||||||
conf.registerKryoClasses(
|
conf.registerKryoClasses(
|
||||||
new Class[] {
|
new Class[] {
|
||||||
Scholix.class,
|
Scholix.class, ScholixCollectedFrom.class, ScholixEntityId.class,
|
||||||
ScholixCollectedFrom.class,
|
ScholixIdentifier.class, ScholixRelationship.class, ScholixResource.class
|
||||||
ScholixEntityId.class,
|
|
||||||
ScholixIdentifier.class,
|
|
||||||
ScholixRelationship.class,
|
|
||||||
ScholixResource.class
|
|
||||||
});
|
});
|
||||||
|
|
||||||
final String graphPath = parser.get("graphPath");
|
final String graphPath = parser.get("graphPath");
|
||||||
|
@ -51,9 +47,7 @@ public class SparkGenerateScholix {
|
||||||
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
|
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
|
||||||
|
|
||||||
final Dataset<ScholixSummary> scholixSummary =
|
final Dataset<ScholixSummary> scholixSummary =
|
||||||
spark.read()
|
spark.read().load(workingDirPath + "/summary").as(Encoders.bean(ScholixSummary.class));
|
||||||
.load(workingDirPath + "/summary")
|
|
||||||
.as(Encoders.bean(ScholixSummary.class));
|
|
||||||
final Dataset<Relation> rels =
|
final Dataset<Relation> rels =
|
||||||
spark.read().load(graphPath + "/relation").as(Encoders.bean(Relation.class));
|
spark.read().load(graphPath + "/relation").as(Encoders.bean(Relation.class));
|
||||||
|
|
||||||
|
@ -80,15 +74,14 @@ public class SparkGenerateScholix {
|
||||||
.save(workingDirPath + "/scholix_target");
|
.save(workingDirPath + "/scholix_target");
|
||||||
|
|
||||||
Dataset<ScholixResource> target =
|
Dataset<ScholixResource> target =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(workingDirPath + "/scholix_target")
|
.load(workingDirPath + "/scholix_target")
|
||||||
.as(Encoders.bean(ScholixResource.class));
|
.as(Encoders.bean(ScholixResource.class));
|
||||||
|
|
||||||
scholix_final
|
scholix_final
|
||||||
.joinWith(
|
.joinWith(
|
||||||
target,
|
target, scholix_final.col("identifier").equalTo(target.col("dnetIdentifier")), "inner")
|
||||||
scholix_final.col("identifier").equalTo(target.col("dnetIdentifier")),
|
|
||||||
"inner")
|
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<Tuple2<Scholix, ScholixResource>, Scholix>)
|
(MapFunction<Tuple2<Scholix, ScholixResource>, Scholix>)
|
||||||
f -> {
|
f -> {
|
||||||
|
|
|
@ -34,44 +34,35 @@ public class SparkGenerateSummary {
|
||||||
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
|
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
|
||||||
|
|
||||||
Dataset<RelatedItemInfo> rInfo =
|
Dataset<RelatedItemInfo> rInfo =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(workingDirPath + "/relatedItemCount")
|
.load(workingDirPath + "/relatedItemCount")
|
||||||
.as(Encoders.bean(RelatedItemInfo.class));
|
.as(Encoders.bean(RelatedItemInfo.class));
|
||||||
|
|
||||||
Dataset<ScholixSummary> entity =
|
Dataset<ScholixSummary> entity =
|
||||||
spark.createDataset(
|
spark.createDataset(
|
||||||
sc.textFile(
|
sc.textFile(
|
||||||
graphPath
|
graphPath + "/publication," + graphPath + "/dataset," + graphPath + "/unknown")
|
||||||
+ "/publication,"
|
|
||||||
+ graphPath
|
|
||||||
+ "/dataset,"
|
|
||||||
+ graphPath
|
|
||||||
+ "/unknown")
|
|
||||||
.map(
|
.map(
|
||||||
s ->
|
s ->
|
||||||
ScholixSummary.fromJsonOAF(
|
ScholixSummary.fromJsonOAF(
|
||||||
ProvisionUtil.getItemTypeFromId(
|
ProvisionUtil.getItemTypeFromId(DHPUtils.getJPathString(jsonIDPath, s)),
|
||||||
DHPUtils.getJPathString(
|
|
||||||
jsonIDPath, s)),
|
|
||||||
s))
|
s))
|
||||||
.rdd(),
|
.rdd(),
|
||||||
Encoders.bean(ScholixSummary.class));
|
Encoders.bean(ScholixSummary.class));
|
||||||
|
|
||||||
Dataset<ScholixSummary> summaryComplete =
|
Dataset<ScholixSummary> summaryComplete =
|
||||||
rInfo.joinWith(entity, rInfo.col("source").equalTo(entity.col("id")))
|
rInfo
|
||||||
|
.joinWith(entity, rInfo.col("source").equalTo(entity.col("id")))
|
||||||
.map(
|
.map(
|
||||||
(MapFunction<
|
(MapFunction<Tuple2<RelatedItemInfo, ScholixSummary>, ScholixSummary>)
|
||||||
Tuple2<RelatedItemInfo, ScholixSummary>,
|
|
||||||
ScholixSummary>)
|
|
||||||
t -> {
|
t -> {
|
||||||
ScholixSummary scholixSummary = t._2();
|
ScholixSummary scholixSummary = t._2();
|
||||||
RelatedItemInfo relatedItemInfo = t._1();
|
RelatedItemInfo relatedItemInfo = t._1();
|
||||||
scholixSummary.setRelatedDatasets(
|
scholixSummary.setRelatedDatasets(relatedItemInfo.getRelatedDataset());
|
||||||
relatedItemInfo.getRelatedDataset());
|
|
||||||
scholixSummary.setRelatedPublications(
|
scholixSummary.setRelatedPublications(
|
||||||
relatedItemInfo.getRelatedPublication());
|
relatedItemInfo.getRelatedPublication());
|
||||||
scholixSummary.setRelatedUnknown(
|
scholixSummary.setRelatedUnknown(relatedItemInfo.getRelatedUnknown());
|
||||||
relatedItemInfo.getRelatedUnknown());
|
|
||||||
return scholixSummary;
|
return scholixSummary;
|
||||||
},
|
},
|
||||||
Encoders.bean(ScholixSummary.class));
|
Encoders.bean(ScholixSummary.class));
|
||||||
|
|
|
@ -45,7 +45,8 @@ public class SparkIndexCollectionOnES {
|
||||||
|
|
||||||
if ("summary".equalsIgnoreCase(type))
|
if ("summary".equalsIgnoreCase(type))
|
||||||
inputRdd =
|
inputRdd =
|
||||||
spark.read()
|
spark
|
||||||
|
.read()
|
||||||
.load(sourcePath)
|
.load(sourcePath)
|
||||||
.as(Encoders.bean(ScholixSummary.class))
|
.as(Encoders.bean(ScholixSummary.class))
|
||||||
.map(
|
.map(
|
||||||
|
|
|
@ -41,8 +41,7 @@ public class Scholix implements Serializable {
|
||||||
final ObjectMapper mapper = new ObjectMapper();
|
final ObjectMapper mapper = new ObjectMapper();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
ScholixSummary scholixSummary =
|
ScholixSummary scholixSummary = mapper.readValue(sourceSummaryJson, ScholixSummary.class);
|
||||||
mapper.readValue(sourceSummaryJson, ScholixSummary.class);
|
|
||||||
Relation rel = mapper.readValue(relation, Relation.class);
|
Relation rel = mapper.readValue(relation, Relation.class);
|
||||||
final Scholix s = new Scholix();
|
final Scholix s = new Scholix();
|
||||||
if (scholixSummary.getDate() != null && scholixSummary.getDate().size() > 0)
|
if (scholixSummary.getDate() != null && scholixSummary.getDate().size() > 0)
|
||||||
|
@ -54,9 +53,7 @@ public class Scholix implements Serializable {
|
||||||
new ScholixEntityId(
|
new ScholixEntityId(
|
||||||
cf.getValue(),
|
cf.getValue(),
|
||||||
Collections.singletonList(
|
Collections.singletonList(
|
||||||
new ScholixIdentifier(
|
new ScholixIdentifier(cf.getKey(), "dnet_identifier"))))
|
||||||
cf.getKey(),
|
|
||||||
"dnet_identifier"))))
|
|
||||||
.collect(Collectors.toList()));
|
.collect(Collectors.toList()));
|
||||||
s.setRelationship(new ScholixRelationship(rel.getRelType(), rel.getRelClass(), null));
|
s.setRelationship(new ScholixRelationship(rel.getRelType(), rel.getRelClass(), null));
|
||||||
s.setSource(ScholixResource.fromSummary(scholixSummary));
|
s.setSource(ScholixResource.fromSummary(scholixSummary));
|
||||||
|
@ -79,8 +76,7 @@ public class Scholix implements Serializable {
|
||||||
new ScholixEntityId(
|
new ScholixEntityId(
|
||||||
cf.getValue(),
|
cf.getValue(),
|
||||||
Collections.singletonList(
|
Collections.singletonList(
|
||||||
new ScholixIdentifier(
|
new ScholixIdentifier(cf.getKey(), "dnet_identifier"))))
|
||||||
cf.getKey(), "dnet_identifier"))))
|
|
||||||
.collect(Collectors.toList()));
|
.collect(Collectors.toList()));
|
||||||
s.setRelationship(new ScholixRelationship(rel.getRelType(), rel.getRelClass(), null));
|
s.setRelationship(new ScholixRelationship(rel.getRelType(), rel.getRelClass(), null));
|
||||||
s.setSource(ScholixResource.fromSummary(scholixSummary));
|
s.setSource(ScholixResource.fromSummary(scholixSummary));
|
||||||
|
@ -106,9 +102,7 @@ public class Scholix implements Serializable {
|
||||||
.map(ScholixEntityId::getName)
|
.map(ScholixEntityId::getName)
|
||||||
.collect(Collectors.toList()));
|
.collect(Collectors.toList()));
|
||||||
this.publisher =
|
this.publisher =
|
||||||
publisher.stream()
|
publisher.stream().map(k -> new ScholixEntityId(k, null)).collect(Collectors.toList());
|
||||||
.map(k -> new ScholixEntityId(k, null))
|
|
||||||
.collect(Collectors.toList());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void generateIdentifier() {
|
public void generateIdentifier() {
|
||||||
|
@ -116,17 +110,14 @@ public class Scholix implements Serializable {
|
||||||
DHPUtils.md5(
|
DHPUtils.md5(
|
||||||
String.format(
|
String.format(
|
||||||
"%s::%s::%s",
|
"%s::%s::%s",
|
||||||
source.getDnetIdentifier(),
|
source.getDnetIdentifier(), relationship.getName(), target.getDnetIdentifier())));
|
||||||
relationship.getName(),
|
|
||||||
target.getDnetIdentifier())));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public Scholix addTarget(final String targetSummaryJson) {
|
public Scholix addTarget(final String targetSummaryJson) {
|
||||||
final ObjectMapper mapper = new ObjectMapper();
|
final ObjectMapper mapper = new ObjectMapper();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
ScholixSummary targetSummary =
|
ScholixSummary targetSummary = mapper.readValue(targetSummaryJson, ScholixSummary.class);
|
||||||
mapper.readValue(targetSummaryJson, ScholixSummary.class);
|
|
||||||
setTarget(ScholixResource.fromSummary(targetSummary));
|
setTarget(ScholixResource.fromSummary(targetSummary));
|
||||||
generateIdentifier();
|
generateIdentifier();
|
||||||
return this;
|
return this;
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue