switched automatic code formatting plugin to net.revelc.code.formatter:formatter-maven-plugin
parent fad94c2155
commit 7a3f8085f7
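The change swaps the previous formatter for net.revelc.code.formatter:formatter-maven-plugin, which reformats Java sources against an Eclipse JDT formatter profile during the Maven build. As a rough sketch only (the plugin version, profile path and goal binding below are assumptions, not taken from this commit), the plugin is typically declared in the parent pom along these lines and then run with the formatter:format goal, or checked with formatter:validate:

<plugin>
	<groupId>net.revelc.code.formatter</groupId>
	<artifactId>formatter-maven-plugin</artifactId>
	<version>2.11.0</version> <!-- assumed version, not from this commit -->
	<configuration>
		<!-- Eclipse formatter profile, e.g. one of the XML profiles added by this commit; path is illustrative -->
		<configFile>eclipse/formatter_aosp.xml</configFile>
	</configuration>
	<executions>
		<execution>
			<goals>
				<!-- reformat sources on each build; formatter:validate fails the build instead -->
				<goal>format</goal>
			</goals>
		</execution>
	</executions>
</plugin>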
@@ -27,11 +27,9 @@ public class GenerateOoziePropertiesMojo extends AbstractMojo {
 		if (System.getProperties().containsKey(PROPERTY_NAME_WF_SOURCE_DIR)
 			&& !System.getProperties().containsKey(PROPERTY_NAME_SANDBOX_NAME)) {
 			String generatedSandboxName =
-				generateSandboxName(
-					System.getProperties().getProperty(PROPERTY_NAME_WF_SOURCE_DIR));
+				generateSandboxName(System.getProperties().getProperty(PROPERTY_NAME_WF_SOURCE_DIR));
 			if (generatedSandboxName != null) {
-				System.getProperties()
-					.setProperty(PROPERTY_NAME_SANDBOX_NAME, generatedSandboxName);
+				System.getProperties().setProperty(PROPERTY_NAME_SANDBOX_NAME, generatedSandboxName);
 			} else {
 				System.out.println(
 					"unable to generate sandbox name from path: "
@@ -70,16 +70,16 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
 	protected File outputFile;

 	/**
-	 * If true, the plugin will silently ignore any non-existent properties files, and the build
-	 * will continue
+	 * If true, the plugin will silently ignore any non-existent properties files, and the build will
+	 * continue
 	 *
 	 * @parameter property="properties.quiet" default-value="true"
 	 */
 	private boolean quiet;

 	/**
-	 * Comma separated list of characters to escape when writing property values. cr=carriage
-	 * return, lf=linefeed, tab=tab. Any other values are taken literally.
+	 * Comma separated list of characters to escape when writing property values. cr=carriage return,
+	 * lf=linefeed, tab=tab. Any other values are taken literally.
 	 *
 	 * @parameter default-value="cr,lf,tab" property="properties.escapeChars"
 	 */

@@ -117,7 +117,8 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
 	 */
 	private String include;

-	/* (non-Javadoc)
+	/*
+	 * (non-Javadoc)
 	 * @see org.apache.maven.plugin.AbstractMojo#execute()
 	 */
 	@Override

@@ -437,8 +438,7 @@ public class WritePredefinedProjectProperties extends AbstractMojo {
 	public void setIncludePropertyKeysFromFiles(String[] includePropertyKeysFromFiles) {
 		if (includePropertyKeysFromFiles != null) {
 			this.includePropertyKeysFromFiles =
-				Arrays.copyOf(
-					includePropertyKeysFromFiles, includePropertyKeysFromFiles.length);
+				Arrays.copyOf(includePropertyKeysFromFiles, includePropertyKeysFromFiles.length);
 		}
 	}
 }
@@ -81,8 +81,7 @@ public class WritePredefinedProjectPropertiesTest {
 	}

 	@Test
-	public void testExecuteWithProjectPropertiesExclusion(@TempDir File testFolder)
-		throws Exception {
+	public void testExecuteWithProjectPropertiesExclusion(@TempDir File testFolder) throws Exception {
 		// given
 		String key = "projectPropertyKey";
 		String value = "projectPropertyValue";

@@ -106,8 +105,7 @@ public class WritePredefinedProjectPropertiesTest {
 	}

 	@Test
-	public void testExecuteWithProjectPropertiesInclusion(@TempDir File testFolder)
-		throws Exception {
+	public void testExecuteWithProjectPropertiesInclusion(@TempDir File testFolder) throws Exception {
 		// given
 		String key = "projectPropertyKey";
 		String value = "projectPropertyValue";

@@ -131,8 +129,7 @@ public class WritePredefinedProjectPropertiesTest {
 	}

 	@Test
-	public void testExecuteIncludingPropertyKeysFromFile(@TempDir File testFolder)
-		throws Exception {
+	public void testExecuteIncludingPropertyKeysFromFile(@TempDir File testFolder) throws Exception {
 		// given
 		String key = "projectPropertyKey";
 		String value = "projectPropertyValue";

@@ -148,8 +145,7 @@ public class WritePredefinedProjectPropertiesTest {
 		includedProperties.setProperty(includedKey, "irrelevantValue");
 		includedProperties.store(new FileWriter(includedPropertiesFile), null);

-		mojo.setIncludePropertyKeysFromFiles(
-			new String[] {includedPropertiesFile.getAbsolutePath()});
+		mojo.setIncludePropertyKeysFromFiles(new String[] {includedPropertiesFile.getAbsolutePath()});

 		// execute
 		mojo.execute();

@@ -225,8 +221,7 @@ public class WritePredefinedProjectPropertiesTest {
 		includedProperties.setProperty(includedKey, "irrelevantValue");
 		includedProperties.storeToXML(new FileOutputStream(includedPropertiesFile), null);

-		mojo.setIncludePropertyKeysFromFiles(
-			new String[] {includedPropertiesFile.getAbsolutePath()});
+		mojo.setIncludePropertyKeysFromFiles(new String[] {includedPropertiesFile.getAbsolutePath()});

 		// execute
 		mojo.execute();

@@ -257,8 +252,7 @@ public class WritePredefinedProjectPropertiesTest {
 		includedProperties.setProperty(includedKey, "irrelevantValue");
 		includedProperties.store(new FileOutputStream(includedPropertiesFile), null);

-		mojo.setIncludePropertyKeysFromFiles(
-			new String[] {includedPropertiesFile.getAbsolutePath()});
+		mojo.setIncludePropertyKeysFromFiles(new String[] {includedPropertiesFile.getAbsolutePath()});

 		// execute
 		Assertions.assertThrows(MojoExecutionException.class, () -> mojo.execute());
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+	<modelVersion>4.0.0</modelVersion>
+
+	<groupId>eu.dnetlib.dhp</groupId>
+	<artifactId>dhp-code-style</artifactId>
+	<version>1.1.7-SNAPSHOT</version>
+
+</project>
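The new dhp-code-style module is a minimal artifact that, as far as this diff shows, exists to carry the shared Eclipse formatter profiles so every module can reference one configuration. A sketch of how a consuming pom might use it, assuming formatter-maven-plugin can resolve configFile from the plugin classpath (the mechanism its built-in default profile relies on); the resource name and plugin version below are illustrative, not taken from this commit:

<plugin>
	<groupId>net.revelc.code.formatter</groupId>
	<artifactId>formatter-maven-plugin</artifactId>
	<version>2.11.0</version> <!-- assumed version -->
	<dependencies>
		<!-- puts the shared profile XML on the plugin's classpath -->
		<dependency>
			<groupId>eu.dnetlib.dhp</groupId>
			<artifactId>dhp-code-style</artifactId>
			<version>1.1.7-SNAPSHOT</version>
		</dependency>
	</dependencies>
	<configuration>
		<!-- hypothetical resource name inside dhp-code-style -->
		<configFile>eclipse/formatter_aosp.xml</configFile>
	</configuration>
</plugin>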
@@ -0,0 +1,252 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<profiles version="10">
|
||||
<profile kind="CodeFormatterProfile" name="Android" version="10">
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="120"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="100"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="tab"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="false"/>
|
||||
</profile>
|
||||
</profiles>
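For orientation, a short illustrative snippet (not code from this repository) of the shape the Android profile above asks for: tab indentation displayed at width 4, braces at the end of the line, annotations on their own line, lines split at 100 columns and comments wrapped at 120:

import java.util.ArrayList;
import java.util.List;

public class FormattingSample {

	private final List<String> values = new ArrayList<>();

	@Override
	public String toString() {
		if (values.isEmpty()) {
			return "empty";
		} else {
			return String.join(",", values);
		}
	}
}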
@@ -0,0 +1,365 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<profiles version="18">
|
||||
<profile kind="CodeFormatterProfile" name="Android_custom" version="18">
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_logical_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_default" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_with_spaces" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_before_code_block" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_switch_case_expressions" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_method_body" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_case" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_multiplicative_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_logical_operator" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_annotation_declaration_on_one_line" value="one_line_never"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_multiplicative_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="120"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_method_body_on_one_line" value="one_line_never"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_loop_body_block_on_one_line" value="one_line_never"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_abstract_method" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_enum_constant_declaration_on_one_line" value="one_line_never"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_variable_declarations_on_columns" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_type_declaration_on_one_line" value="one_line_never"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_additive_operator" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_relational_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiplicative_operator" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_anonymous_type_declaration_on_one_line" value="one_line_never"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_switch_case_expressions" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_shift_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_lambda_body" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_code_block" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_bitwise_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_parameters" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="32"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_loops" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_simple_for_body_on_same_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_relational_operator" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation" value="common_lines"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_additive_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_string_concatenation" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.text_block_indentation" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_module_statements" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_after_code_block" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_if_then_body_block_on_one_line" value="one_line_never"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_assignment_statements_on_columns" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_default" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_between_different_tags" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression_chain" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_additive_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_conditional_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_shift_operator" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines" value="2147483647"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_bitwise_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_code_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_bitwise_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_assignment_operator" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_not_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.keep_lambda_body_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_arguments" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_logical_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_bitwise_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_relational_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_tag_description" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_string_concatenation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_last_class_body_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_while_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_logical_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_shift_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_statement_group_in_switch" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_shift_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_do_while_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_enum_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_multiplicative_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_additive_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_getter_setter_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_string_concatenation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="tab"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_relational_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_string_concatenation" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
</profile>
</profiles>
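For context, a profile file like the one above is not opened in Eclipse directly during the build; it is consumed by the formatter plugin. The following is a minimal sketch of how such a profile is typically wired into net.revelc.code.formatter:formatter-maven-plugin in the project's pom.xml; the configFile path, the omitted plugin version, and the goal binding shown here are illustrative assumptions, not values taken from this commit.

<plugin>
  <groupId>net.revelc.code.formatter</groupId>
  <artifactId>formatter-maven-plugin</artifactId>
  <!-- version assumed to be declared in pluginManagement elsewhere -->
  <configuration>
    <!-- path to an Eclipse formatter profile XML such as the one added above (assumed location) -->
    <configFile>${project.basedir}/eclipse/formatter.xml</configFile>
  </configuration>
  <executions>
    <execution>
      <goals>
        <!-- reformats sources during the build; the plugin also offers a validate goal for CI checks -->
        <goal>format</goal>
      </goals>
    </execution>
  </executions>
</plugin>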
@@ -0,0 +1,279 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<profiles version="11">
<profile kind="CodeFormatterProfile" name="Forge" version="11">
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="3"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
<setting id="org.eclipse.jdt.core.compiler.source" value="1.5"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="3"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="3"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.5"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="3"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="120"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.5"/>
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="next_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
</profile>
</profiles>
@@ -0,0 +1,337 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<profiles version="13">
<profile kind="CodeFormatterProfile" name="GoogleStyle" version="13">
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_cascading_method_invocation_with_arguments.count_dependent" value="16|-1|16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_prefer_two_fragments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_comment_inline_tags" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_local_variable_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_parameter" value="1040"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_type.count_dependent" value="1585|-1|1585"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields.count_dependent" value="16|-1|16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression.count_dependent" value="16|4|80"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration.count_dependent" value="16|4|48"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration.count_dependent" value="16|4|49"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_cascading_method_invocation_with_arguments" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.compiler.source" value="1.7"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration.count_dependent" value="16|4|48"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_local_variable_annotation" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants.count_dependent" value="16|5|48"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="100"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation.count_dependent" value="16|4|48"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_package" value="1585"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_type_annotation" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_field_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_generic_type_arguments" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment_new_line_at_start_of_html_paragraph" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comment_prefix" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_parameter_annotation" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_method" value="1585"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="2"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation.count_dependent" value="16|5|80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_parameter.count_dependent" value="1040|-1|1040"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_package.count_dependent" value="1585|-1|1585"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.force_if_else_statement_brace" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="3"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_package_annotation" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation.count_dependent" value="16|-1|16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_type" value="1585"/>
|
||||
<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.7"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_new_anonymous_class" value="20"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_local_variable.count_dependent" value="1585|-1|1585"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_field.count_dependent" value="1585|-1|1585"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration.count_dependent" value="16|5|80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant.count_dependent" value="16|-1|16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="100"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="2"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_field" value="1585"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer.count_dependent" value="16|5|80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.7"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration.count_dependent" value="16|4|48"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_method.count_dependent" value="1585|-1|1585"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression.count_dependent" value="16|-1|16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.wrap_non_simple_member_annotation" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_annotations_on_local_variable" value="1585"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call.count_dependent" value="16|5|80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_generic_type_arguments.count_dependent" value="16|-1|16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression.count_dependent" value="16|5|80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration.count_dependent" value="16|5|80"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.alignment_for_for_statement" value="16"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
|
||||
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
|
||||
</profile>
</profiles>
@ -9,6 +9,7 @@
<artifactId>dhp-build</artifactId>
<packaging>pom</packaging>
<modules>
<module>dhp-code-style</module>
<module>dhp-build-assembly-resources</module>
<module>dhp-build-properties-maven-plugin</module>
</modules>
@ -35,9 +35,7 @@ public class ArgumentApplicationParser implements Serializable {
Arrays.stream(configuration)
.map(
conf -> {
final Option o =
new Option(
conf.getParamName(), true, conf.getParamDescription());
final Option o = new Option(conf.getParamName(), true, conf.getParamDescription());
o.setLongOpt(conf.getParamLongName());
o.setRequired(conf.isParamRequired());
if (conf.isCompressed()) {
@ -12,23 +12,18 @@ public class SparkSessionSupport {
private SparkSessionSupport() {}

/**
* Runs a given function using SparkSession created using default builder and supplied
* SparkConf. Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession
* created externally.
* Runs a given function using SparkSession created using default builder and supplied SparkConf.
* Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession created
* externally.
*
* @param conf SparkConf instance
* @param isSparkSessionManaged When true will stop SparkSession
* @param fn Consumer to be applied to constructed SparkSession
*/
public static void runWithSparkSession(
SparkConf conf,
Boolean isSparkSessionManaged,
ThrowingConsumer<SparkSession, Exception> fn) {
SparkConf conf, Boolean isSparkSessionManaged, ThrowingConsumer<SparkSession, Exception> fn) {
runWithSparkSession(
c -> SparkSession.builder().config(c).getOrCreate(),
conf,
isSparkSessionManaged,
fn);
c -> SparkSession.builder().config(c).getOrCreate(), conf, isSparkSessionManaged, fn);
}

/**
@ -41,9 +36,7 @@ public class SparkSessionSupport {
* @param fn Consumer to be applied to constructed SparkSession
*/
public static void runWithSparkHiveSession(
SparkConf conf,
Boolean isSparkSessionManaged,
ThrowingConsumer<SparkSession, Exception> fn) {
SparkConf conf, Boolean isSparkSessionManaged, ThrowingConsumer<SparkSession, Exception> fn) {
runWithSparkSession(
c -> SparkSession.builder().config(c).enableHiveSupport().getOrCreate(),
conf,
@ -52,9 +45,9 @@ public class SparkSessionSupport {
}

/**
* Runs a given function using SparkSession created using supplied builder and supplied
* SparkConf. Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession
* created externally.
* Runs a given function using SparkSession created using supplied builder and supplied SparkConf.
* Stops SparkSession when SparkSession is managed. Allows to reuse SparkSession created
* externally.
*
* @param sparkSessionBuilder Builder of SparkSession
* @param conf SparkConf instance
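A minimal usage sketch of the helper reformatted above, assuming only the signature visible in this diff; the example class, application name, master and input path are illustrative, and the imports of SparkSessionSupport and ThrowingConsumer (whose packages are not shown in this excerpt) are omitted:

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

public class SparkSessionSupportExample {

    public static void main(String[] args) {
        // hypothetical application name, master and input path
        SparkConf conf = new SparkConf();
        conf.setAppName("spark-session-support-example");
        conf.setMaster("local[*]");

        // when true, the helper stops the SparkSession after fn completes
        Boolean isSparkSessionManaged = Boolean.TRUE;

        // the lambda is a ThrowingConsumer<SparkSession, Exception>, so checked
        // exceptions thrown inside it need no extra wrapping here
        SparkSessionSupport.runWithSparkSession(conf, isSparkSessionManaged, spark -> {
            spark.read().textFile("/tmp/example-input.txt").show();
        });
    }
}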
@ -32,8 +32,7 @@ public class VtdUtilityParser {
}
}

private static Map<String, String> getAttributes(
final VTDNav vn, final List<String> attributes) {
private static Map<String, String> getAttributes(final VTDNav vn, final List<String> attributes) {
final Map<String, String> currentAttributes = new HashMap<>();
if (attributes != null) {
@ -14,8 +14,7 @@ public abstract class AbstractExtensionFunction extends ExtensionFunctionDefinit

public abstract String getName();

public abstract Sequence doCall(XPathContext context, Sequence[] arguments)
throws XPathException;
public abstract Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException;

@Override
public StructuredQName getFunctionQName() {
@ -24,8 +24,7 @@ public class PickFirst extends AbstractExtensionFunction {
final String s1 = getValue(arguments[0]);
final String s2 = getValue(arguments[1]);

return new StringValue(
StringUtils.isNotBlank(s1) ? s1 : StringUtils.isNotBlank(s2) ? s2 : "");
return new StringValue(StringUtils.isNotBlank(s1) ? s1 : StringUtils.isNotBlank(s2) ? s2 : "");
}

private String getValue(final Sequence arg) throws XPathException {
@ -89,7 +89,8 @@ public class MessageManager {
}

public void close() throws IOException {
channels.values()
channels
.values()
.forEach(
ch -> {
try {
@ -125,8 +126,7 @@ public class MessageManager {
}

public void startConsumingMessage(
final String queueName, final boolean durable, final boolean autodelete)
throws Exception {
final String queueName, final boolean durable, final boolean autodelete) throws Exception {

Channel channel = createChannel(createConnection(), queueName, durable, autodelete);
channel.basicConsume(queueName, false, new MessageConsumer(channel, queueMessages));
@ -12,8 +12,7 @@ public class ArgumentApplicationParserTest {
public void testParseParameter() throws Exception {
final String jsonConfiguration =
IOUtils.toString(
this.getClass()
.getResourceAsStream("/eu/dnetlib/application/parameters.json"));
this.getClass().getResourceAsStream("/eu/dnetlib/application/parameters.json"));
assertNotNull(jsonConfiguration);
ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(
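For orientation, a minimal sketch of driving this parser outside the test; only the constructor and the parseArgument call are taken from the hunk above, while the example class, the short option names and the argument values are hypothetical, and the import of ArgumentApplicationParser (package not shown in this excerpt) is omitted:

import org.apache.commons.io.IOUtils;

public class ArgumentApplicationParserExample {

    public static void main(String[] args) throws Exception {
        // same classpath resource as in the test above
        final String jsonConfiguration =
                IOUtils.toString(
                        ArgumentApplicationParserExample.class.getResourceAsStream(
                                "/eu/dnetlib/application/parameters.json"));

        ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);

        // hypothetical short option names; the real ones are defined in parameters.json
        parser.parseArgument(new String[] {"-i", "/tmp/input", "-o", "/tmp/output"});
    }
}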
@ -21,8 +21,7 @@ public class HdfsSupportTest {
@Test
public void shouldThrowARuntimeExceptionOnError() {
// when
assertThrows(
RuntimeException.class, () -> HdfsSupport.remove(null, new Configuration()));
assertThrows(RuntimeException.class, () -> HdfsSupport.remove(null, new Configuration()));
}

@Test
@ -53,8 +52,7 @@ public class HdfsSupportTest {
@Test
public void shouldThrowARuntimeExceptionOnError() {
// when
assertThrows(
RuntimeException.class, () -> HdfsSupport.listFiles(null, new Configuration()));
assertThrows(RuntimeException.class, () -> HdfsSupport.listFiles(null, new Configuration()));
}

@Test
@ -27,9 +27,7 @@ public class MessageTest {
assertEquals(m1.getJobName(), m.getJobName());

assertNotNull(m1.getBody());
m1.getBody()
.keySet()
.forEach(it -> assertEquals(m1.getBody().get(it), m.getBody().get(it)));
m1.getBody().keySet().forEach(it -> assertEquals(m1.getBody().get(it), m.getBody().get(it)));
assertEquals(m1.getJobName(), m.getJobName());
}
@ -23,8 +23,7 @@ public class ModelSupport {
}

/**
* Defines the mapping between the actual entity types and the relative classes implementing
* them
* Defines the mapping between the actual entity types and the relative classes implementing them
*/
public static final Map<EntityType, Class> entityTypes = Maps.newHashMap();
@ -169,40 +168,26 @@ public class ModelSupport {
Optional.ofNullable(r.getRelType())
.map(
relType ->
Optional.ofNullable(
r
.getSubRelType())
Optional.ofNullable(r.getSubRelType())
.map(
subRelType ->
Optional
.ofNullable(
r
.getRelClass())
Optional.ofNullable(r.getRelClass())
.map(
relClass ->
String
.join(
String.join(
source,
target,
relType,
subRelType,
relClass))
.orElse(
String
.join(
String.join(
source,
target,
relType,
subRelType)))
.orElse(
String
.join(
source,
target,
relType)))
.orElse(
String.join(
source, target)))
.orElse(String.join(source, target, relType)))
.orElse(String.join(source, target)))
.orElse(source))
.orElse(null);
}
@ -76,11 +76,6 @@ public class DataInfo implements Serializable {
@Override
public int hashCode() {
return Objects.hash(
invisible,
inferred,
deletedbyinference,
trust,
inferenceprovenance,
provenanceaction);
invisible, inferred, deletedbyinference, trust, inferenceprovenance, provenanceaction);
}
}
@ -91,9 +91,7 @@ public class Dataset extends Result implements Serializable {
|
|||
final Dataset d = (Dataset) e;
|
||||
|
||||
storagedate =
|
||||
d.getStoragedate() != null && compareTrust(this, e) < 0
|
||||
? d.getStoragedate()
|
||||
: storagedate;
|
||||
d.getStoragedate() != null && compareTrust(this, e) < 0 ? d.getStoragedate() : storagedate;
|
||||
|
||||
device = d.getDevice() != null && compareTrust(this, e) < 0 ? d.getDevice() : device;
|
||||
|
||||
|
|
|
@ -385,15 +385,10 @@ public class Datasource extends OafEntity implements Serializable {
|
|||
? d.getOfficialname()
|
||||
: officialname;
|
||||
englishname =
|
||||
d.getEnglishname() != null && compareTrust(this, e) < 0
|
||||
? d.getEnglishname()
|
||||
: officialname;
|
||||
d.getEnglishname() != null && compareTrust(this, e) < 0 ? d.getEnglishname() : officialname;
|
||||
websiteurl =
|
||||
d.getWebsiteurl() != null && compareTrust(this, e) < 0
|
||||
? d.getWebsiteurl()
|
||||
: websiteurl;
|
||||
logourl =
|
||||
d.getLogourl() != null && compareTrust(this, e) < 0 ? d.getLogourl() : getLogourl();
|
||||
d.getWebsiteurl() != null && compareTrust(this, e) < 0 ? d.getWebsiteurl() : websiteurl;
|
||||
logourl = d.getLogourl() != null && compareTrust(this, e) < 0 ? d.getLogourl() : getLogourl();
|
||||
contactemail =
|
||||
d.getContactemail() != null && compareTrust(this, e) < 0
|
||||
? d.getContactemail()
|
||||
|
@ -402,20 +397,15 @@ public class Datasource extends OafEntity implements Serializable {
|
|||
d.getNamespaceprefix() != null && compareTrust(this, e) < 0
|
||||
? d.getNamespaceprefix()
|
||||
: namespaceprefix;
|
||||
latitude =
|
||||
d.getLatitude() != null && compareTrust(this, e) < 0 ? d.getLatitude() : latitude;
|
||||
latitude = d.getLatitude() != null && compareTrust(this, e) < 0 ? d.getLatitude() : latitude;
|
||||
longitude =
|
||||
d.getLongitude() != null && compareTrust(this, e) < 0
|
||||
? d.getLongitude()
|
||||
: longitude;
|
||||
d.getLongitude() != null && compareTrust(this, e) < 0 ? d.getLongitude() : longitude;
|
||||
dateofvalidation =
|
||||
d.getDateofvalidation() != null && compareTrust(this, e) < 0
|
||||
? d.getDateofvalidation()
|
||||
: dateofvalidation;
|
||||
description =
|
||||
d.getDescription() != null && compareTrust(this, e) < 0
|
||||
? d.getDescription()
|
||||
: description;
|
||||
d.getDescription() != null && compareTrust(this, e) < 0 ? d.getDescription() : description;
|
||||
subjects = mergeLists(subjects, d.getSubjects());
|
||||
|
||||
// opendoar specific fields (od*)
|
||||
|
@ -428,9 +418,7 @@ public class Datasource extends OafEntity implements Serializable {
|
|||
? d.getOdnumberofitemsdate()
|
||||
: odnumberofitemsdate;
|
||||
odpolicies =
|
||||
d.getOdpolicies() != null && compareTrust(this, e) < 0
|
||||
? d.getOdpolicies()
|
||||
: odpolicies;
|
||||
d.getOdpolicies() != null && compareTrust(this, e) < 0 ? d.getOdpolicies() : odpolicies;
|
||||
odlanguages = mergeLists(odlanguages, d.getOdlanguages());
|
||||
odcontenttypes = mergeLists(odcontenttypes, d.getOdcontenttypes());
|
||||
accessinfopackage = mergeLists(accessinfopackage, d.getAccessinfopackage());
|
||||
|
@ -482,9 +470,7 @@ public class Datasource extends OafEntity implements Serializable {
|
|||
: datauploadrestriction;
|
||||
|
||||
versioning =
|
||||
d.getVersioning() != null && compareTrust(this, e) < 0
|
||||
? d.getVersioning()
|
||||
: versioning;
|
||||
d.getVersioning() != null && compareTrust(this, e) < 0 ? d.getVersioning() : versioning;
|
||||
citationguidelineurl =
|
||||
d.getCitationguidelineurl() != null && compareTrust(this, e) < 0
|
||||
? d.getCitationguidelineurl()
|
||||
|
@ -496,9 +482,7 @@ public class Datasource extends OafEntity implements Serializable {
|
|||
? d.getQualitymanagementkind()
|
||||
: qualitymanagementkind;
|
||||
pidsystems =
|
||||
d.getPidsystems() != null && compareTrust(this, e) < 0
|
||||
? d.getPidsystems()
|
||||
: pidsystems;
|
||||
d.getPidsystems() != null && compareTrust(this, e) < 0 ? d.getPidsystems() : pidsystems;
|
||||
|
||||
certificates =
|
||||
d.getCertificates() != null && compareTrust(this, e) < 0
|
||||
|
|
|
@ -123,15 +123,9 @@ public class Instance implements Serializable {
|
|||
public String toComparableString() {
|
||||
return String.format(
|
||||
"%s::%s::%s::%s",
|
||||
hostedby != null && hostedby.getKey() != null
|
||||
? hostedby.getKey().toLowerCase()
|
||||
: "",
|
||||
accessright != null && accessright.getClassid() != null
|
||||
? accessright.getClassid()
|
||||
: "",
|
||||
instancetype != null && instancetype.getClassid() != null
|
||||
? instancetype.getClassid()
|
||||
: "",
|
||||
hostedby != null && hostedby.getKey() != null ? hostedby.getKey().toLowerCase() : "",
|
||||
accessright != null && accessright.getClassid() != null ? accessright.getClassid() : "",
|
||||
instancetype != null && instancetype.getClassid() != null ? instancetype.getClassid() : "",
|
||||
url != null ? url : "");
|
||||
}
|
||||
|
||||
|
|
|
@ -41,8 +41,7 @@ public class KeyValue implements Serializable {
|
|||
? ""
|
||||
: String.format(
|
||||
"%s::%s",
|
||||
key != null ? key.toLowerCase() : "",
|
||||
value != null ? value.toLowerCase() : "");
|
||||
key != null ? key.toLowerCase() : "", value != null ? value.toLowerCase() : "");
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
|
|
|
@ -41,8 +41,7 @@ public abstract class Oaf implements Serializable {
|
|||
}
|
||||
|
||||
protected String extractTrust(Oaf e) {
|
||||
if (e == null || e.getDataInfo() == null || e.getDataInfo().getTrust() == null)
|
||||
return "0.0";
|
||||
if (e == null || e.getDataInfo() == null || e.getDataInfo().getTrust() == null) return "0.0";
|
||||
return e.getDataInfo().getTrust();
|
||||
}
|
||||
|
||||
|
|
|
@ -180,27 +180,19 @@ public class Organization extends OafEntity implements Serializable {
|
|||
? o.getLegalshortname()
|
||||
: legalshortname;
|
||||
legalname =
|
||||
o.getLegalname() != null && compareTrust(this, e) < 0
|
||||
? o.getLegalname()
|
||||
: legalname;
|
||||
o.getLegalname() != null && compareTrust(this, e) < 0 ? o.getLegalname() : legalname;
|
||||
alternativeNames = mergeLists(o.getAlternativeNames(), alternativeNames);
|
||||
websiteurl =
|
||||
o.getWebsiteurl() != null && compareTrust(this, e) < 0
|
||||
? o.getWebsiteurl()
|
||||
: websiteurl;
|
||||
o.getWebsiteurl() != null && compareTrust(this, e) < 0 ? o.getWebsiteurl() : websiteurl;
|
||||
logourl = o.getLogourl() != null && compareTrust(this, e) < 0 ? o.getLogourl() : logourl;
|
||||
eclegalbody =
|
||||
o.getEclegalbody() != null && compareTrust(this, e) < 0
|
||||
? o.getEclegalbody()
|
||||
: eclegalbody;
|
||||
o.getEclegalbody() != null && compareTrust(this, e) < 0 ? o.getEclegalbody() : eclegalbody;
|
||||
eclegalperson =
|
||||
o.getEclegalperson() != null && compareTrust(this, e) < 0
|
||||
? o.getEclegalperson()
|
||||
: eclegalperson;
|
||||
ecnonprofit =
|
||||
o.getEcnonprofit() != null && compareTrust(this, e) < 0
|
||||
? o.getEcnonprofit()
|
||||
: ecnonprofit;
|
||||
o.getEcnonprofit() != null && compareTrust(this, e) < 0 ? o.getEcnonprofit() : ecnonprofit;
|
||||
ecresearchorganization =
|
||||
o.getEcresearchorganization() != null && compareTrust(this, e) < 0
|
||||
? o.getEcresearchorganization()
|
||||
|
@ -226,9 +218,7 @@ public class Organization extends OafEntity implements Serializable {
|
|||
? o.getEcsmevalidated()
|
||||
: ecsmevalidated;
|
||||
ecnutscode =
|
||||
o.getEcnutscode() != null && compareTrust(this, e) < 0
|
||||
? o.getEcnutscode()
|
||||
: ecnutscode;
|
||||
o.getEcnutscode() != null && compareTrust(this, e) < 0 ? o.getEcnutscode() : ecnutscode;
|
||||
country = o.getCountry() != null && compareTrust(this, e) < 0 ? o.getCountry() : country;
|
||||
mergeOAFDataInfo(o);
|
||||
}
|
||||
|
|
|
@ -80,7 +80,6 @@ public class OriginDescription implements Serializable {
|
|||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(
|
||||
harvestDate, altered, baseURL, identifier, datestamp, metadataNamespace);
|
||||
return Objects.hash(harvestDate, altered, baseURL, identifier, datestamp, metadataNamespace);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -276,25 +276,19 @@ public class Project extends OafEntity implements Serializable {
|
|||
Project p = (Project) e;
|
||||
|
||||
websiteurl =
|
||||
p.getWebsiteurl() != null && compareTrust(this, e) < 0
|
||||
? p.getWebsiteurl()
|
||||
: websiteurl;
|
||||
p.getWebsiteurl() != null && compareTrust(this, e) < 0 ? p.getWebsiteurl() : websiteurl;
|
||||
code = p.getCode() != null && compareTrust(this, e) < 0 ? p.getCode() : code;
|
||||
acronym = p.getAcronym() != null && compareTrust(this, e) < 0 ? p.getAcronym() : acronym;
|
||||
title = p.getTitle() != null && compareTrust(this, e) < 0 ? p.getTitle() : title;
|
||||
startdate =
|
||||
p.getStartdate() != null && compareTrust(this, e) < 0
|
||||
? p.getStartdate()
|
||||
: startdate;
|
||||
p.getStartdate() != null && compareTrust(this, e) < 0 ? p.getStartdate() : startdate;
|
||||
enddate = p.getEnddate() != null && compareTrust(this, e) < 0 ? p.getEnddate() : enddate;
|
||||
callidentifier =
|
||||
p.getCallidentifier() != null && compareTrust(this, e) < 0
|
||||
? p.getCallidentifier()
|
||||
: callidentifier;
|
||||
keywords =
|
||||
p.getKeywords() != null && compareTrust(this, e) < 0 ? p.getKeywords() : keywords;
|
||||
duration =
|
||||
p.getDuration() != null && compareTrust(this, e) < 0 ? p.getDuration() : duration;
|
||||
keywords = p.getKeywords() != null && compareTrust(this, e) < 0 ? p.getKeywords() : keywords;
|
||||
duration = p.getDuration() != null && compareTrust(this, e) < 0 ? p.getDuration() : duration;
|
||||
ecsc39 = p.getEcsc39() != null && compareTrust(this, e) < 0 ? p.getEcsc39() : ecsc39;
|
||||
oamandatepublications =
|
||||
p.getOamandatepublications() != null && compareTrust(this, e) < 0
|
||||
|
@ -311,13 +305,9 @@ public class Project extends OafEntity implements Serializable {
|
|||
? p.getContracttype()
|
||||
: contracttype;
|
||||
optional1 =
|
||||
p.getOptional1() != null && compareTrust(this, e) < 0
|
||||
? p.getOptional1()
|
||||
: optional1;
|
||||
p.getOptional1() != null && compareTrust(this, e) < 0 ? p.getOptional1() : optional1;
|
||||
optional2 =
|
||||
p.getOptional2() != null && compareTrust(this, e) < 0
|
||||
? p.getOptional2()
|
||||
: optional2;
|
||||
p.getOptional2() != null && compareTrust(this, e) < 0 ? p.getOptional2() : optional2;
|
||||
jsonextrainfo =
|
||||
p.getJsonextrainfo() != null && compareTrust(this, e) < 0
|
||||
? p.getJsonextrainfo()
|
||||
|
@ -327,9 +317,7 @@ public class Project extends OafEntity implements Serializable {
|
|||
? p.getContactfullname()
|
||||
: contactfullname;
|
||||
contactfax =
|
||||
p.getContactfax() != null && compareTrust(this, e) < 0
|
||||
? p.getContactfax()
|
||||
: contactfax;
|
||||
p.getContactfax() != null && compareTrust(this, e) < 0 ? p.getContactfax() : contactfax;
|
||||
contactphone =
|
||||
p.getContactphone() != null && compareTrust(this, e) < 0
|
||||
? p.getContactphone()
|
||||
|
@ -339,12 +327,9 @@ public class Project extends OafEntity implements Serializable {
|
|||
? p.getContactemail()
|
||||
: contactemail;
|
||||
summary = p.getSummary() != null && compareTrust(this, e) < 0 ? p.getSummary() : summary;
|
||||
currency =
|
||||
p.getCurrency() != null && compareTrust(this, e) < 0 ? p.getCurrency() : currency;
|
||||
currency = p.getCurrency() != null && compareTrust(this, e) < 0 ? p.getCurrency() : currency;
|
||||
totalcost =
|
||||
p.getTotalcost() != null && compareTrust(this, e) < 0
|
||||
? p.getTotalcost()
|
||||
: totalcost;
|
||||
p.getTotalcost() != null && compareTrust(this, e) < 0 ? p.getTotalcost() : totalcost;
|
||||
fundedamount =
|
||||
p.getFundedamount() != null && compareTrust(this, e) < 0
|
||||
? p.getFundedamount()
|
||||
|
|
|
@ -56,12 +56,8 @@ public class DLIDataset extends Dataset {
|
|||
if (a != null)
|
||||
a.forEach(
|
||||
p -> {
|
||||
if (p != null
|
||||
&& StringUtils.isNotBlank(p.getId())
|
||||
&& result.containsKey(p.getId())) {
|
||||
if ("incomplete"
|
||||
.equalsIgnoreCase(
|
||||
result.get(p.getId()).getCompletionStatus())
|
||||
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||
result.put(p.getId(), p);
|
||||
}
|
||||
|
@ -72,12 +68,8 @@ public class DLIDataset extends Dataset {
|
|||
if (b != null)
|
||||
b.forEach(
|
||||
p -> {
|
||||
if (p != null
|
||||
&& StringUtils.isNotBlank(p.getId())
|
||||
&& result.containsKey(p.getId())) {
|
||||
if ("incomplete"
|
||||
.equalsIgnoreCase(
|
||||
result.get(p.getId()).getCompletionStatus())
|
||||
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||
result.put(p.getId(), p);
|
||||
}
|
||||
|
|
|
@ -54,12 +54,8 @@ public class DLIPublication extends Publication implements Serializable {
|
|||
if (a != null)
|
||||
a.forEach(
|
||||
p -> {
|
||||
if (p != null
|
||||
&& StringUtils.isNotBlank(p.getId())
|
||||
&& result.containsKey(p.getId())) {
|
||||
if ("incomplete"
|
||||
.equalsIgnoreCase(
|
||||
result.get(p.getId()).getCompletionStatus())
|
||||
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||
result.put(p.getId(), p);
|
||||
}
|
||||
|
@ -70,12 +66,8 @@ public class DLIPublication extends Publication implements Serializable {
|
|||
if (b != null)
|
||||
b.forEach(
|
||||
p -> {
|
||||
if (p != null
|
||||
&& StringUtils.isNotBlank(p.getId())
|
||||
&& result.containsKey(p.getId())) {
|
||||
if ("incomplete"
|
||||
.equalsIgnoreCase(
|
||||
result.get(p.getId()).getCompletionStatus())
|
||||
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||
result.put(p.getId(), p);
|
||||
}
|
||||
|
|
|
@ -82,12 +82,8 @@ public class DLIUnknown extends Oaf implements Serializable {
|
|||
if (a != null)
|
||||
a.forEach(
|
||||
p -> {
|
||||
if (p != null
|
||||
&& StringUtils.isNotBlank(p.getId())
|
||||
&& result.containsKey(p.getId())) {
|
||||
if ("incomplete"
|
||||
.equalsIgnoreCase(
|
||||
result.get(p.getId()).getCompletionStatus())
|
||||
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||
result.put(p.getId(), p);
|
||||
}
|
||||
|
@ -98,12 +94,8 @@ public class DLIUnknown extends Oaf implements Serializable {
|
|||
if (b != null)
|
||||
b.forEach(
|
||||
p -> {
|
||||
if (p != null
|
||||
&& StringUtils.isNotBlank(p.getId())
|
||||
&& result.containsKey(p.getId())) {
|
||||
if ("incomplete"
|
||||
.equalsIgnoreCase(
|
||||
result.get(p.getId()).getCompletionStatus())
|
||||
if (p != null && StringUtils.isNotBlank(p.getId()) && result.containsKey(p.getId())) {
|
||||
if ("incomplete".equalsIgnoreCase(result.get(p.getId()).getCompletionStatus())
|
||||
&& StringUtils.isNotBlank(p.getCompletionStatus())) {
|
||||
result.put(p.getId(), p);
|
||||
}
|
||||
|
|
|
@ -65,8 +65,7 @@ public class MigrateActionSet {
|
|||
|
||||
ISLookUpService isLookUp = ISLookupClientFactory.getLookUpService(isLookupUrl);
|
||||
|
||||
Configuration conf =
|
||||
getConfiguration(distcp_task_timeout, distcp_memory_mb, distcp_num_maps);
|
||||
Configuration conf = getConfiguration(distcp_task_timeout, distcp_memory_mb, distcp_num_maps);
|
||||
FileSystem targetFS = FileSystem.get(conf);
|
||||
|
||||
Configuration sourceConf =
|
||||
|
@ -99,13 +98,7 @@ public class MigrateActionSet {
|
|||
final String actionSetDirectory = pathQ.pollLast();
|
||||
|
||||
final Path targetPath =
|
||||
new Path(
|
||||
targetNN
|
||||
+ workDir
|
||||
+ SEPARATOR
|
||||
+ actionSetDirectory
|
||||
+ SEPARATOR
|
||||
+ rawSet);
|
||||
new Path(targetNN + workDir + SEPARATOR + actionSetDirectory + SEPARATOR + rawSet);
|
||||
|
||||
log.info("using TARGET PATH: {}", targetPath);
|
||||
|
||||
|
@@ -114,12 +107,7 @@ public class MigrateActionSet {
|
|||
targetFS.delete(targetPath, true);
|
||||
}
|
||||
runDistcp(
|
||||
distcp_num_maps,
|
||||
distcp_memory_mb,
|
||||
distcp_task_timeout,
|
||||
conf,
|
||||
source,
|
||||
targetPath);
|
||||
distcp_num_maps, distcp_memory_mb, distcp_task_timeout, conf, source, targetPath);
|
||||
}
|
||||
|
||||
targetPaths.add(targetPath);
|
||||
|
@@ -128,8 +116,7 @@ public class MigrateActionSet {
|
|||
}
|
||||
|
||||
props.setProperty(
|
||||
TARGET_PATHS,
|
||||
targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(",")));
|
||||
TARGET_PATHS, targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(",")));
|
||||
File file = new File(System.getProperty("oozie.action.output.properties"));
|
||||
|
||||
try (OutputStream os = new FileOutputStream(file)) {
|
||||
|
|
|
@@ -48,9 +48,7 @@ public class ProtoConverter implements Serializable {
|
|||
rel.setRelClass(r.getRelClass());
|
||||
rel.setCollectedfrom(
|
||||
r.getCollectedfromCount() > 0
|
||||
? r.getCollectedfromList().stream()
|
||||
.map(kv -> mapKV(kv))
|
||||
.collect(Collectors.toList())
|
||||
? r.getCollectedfromList().stream().map(kv -> mapKV(kv)).collect(Collectors.toList())
|
||||
: null);
|
||||
return rel;
|
||||
}
|
||||
|
@@ -77,9 +75,7 @@ public class ProtoConverter implements Serializable {
|
|||
|
||||
final ResultProtos.Result r = oaf.getEntity().getResult();
|
||||
if (r.getInstanceCount() > 0) {
|
||||
return r.getInstanceList().stream()
|
||||
.map(i -> convertInstance(i))
|
||||
.collect(Collectors.toList());
|
||||
return r.getInstanceList().stream().map(i -> convertInstance(i)).collect(Collectors.toList());
|
||||
}
|
||||
return Lists.newArrayList();
|
||||
}
|
||||
|
@@ -130,8 +126,7 @@ public class ProtoConverter implements Serializable {
|
|||
}
|
||||
|
||||
private static Datasource convertDataSource(OafProtos.Oaf oaf) {
|
||||
final DatasourceProtos.Datasource.Metadata m =
|
||||
oaf.getEntity().getDatasource().getMetadata();
|
||||
final DatasourceProtos.Datasource.Metadata m = oaf.getEntity().getDatasource().getMetadata();
|
||||
final Datasource datasource = setOaf(new Datasource(), oaf);
|
||||
setEntity(datasource, oaf);
|
||||
datasource.setAccessinfopackage(
|
||||
|
@@ -171,9 +166,7 @@ public class ProtoConverter implements Serializable {
|
|||
datasource.setOpenairecompatibility(mapQualifier(m.getOpenairecompatibility()));
|
||||
datasource.setPidsystems(mapStringField(m.getPidsystems()));
|
||||
datasource.setPolicies(
|
||||
m.getPoliciesList().stream()
|
||||
.map(ProtoConverter::mapKV)
|
||||
.collect(Collectors.toList()));
|
||||
m.getPoliciesList().stream().map(ProtoConverter::mapKV).collect(Collectors.toList()));
|
||||
datasource.setQualitymanagementkind(mapStringField(m.getQualitymanagementkind()));
|
||||
datasource.setReleaseenddate(mapStringField(m.getReleaseenddate()));
|
||||
datasource.setServiceprovider(mapBoolField(m.getServiceprovider()));
|
||||
|
@@ -218,9 +211,7 @@ public class ProtoConverter implements Serializable {
|
|||
project.setTitle(mapStringField(m.getTitle()));
|
||||
project.setWebsiteurl(mapStringField(m.getWebsiteurl()));
|
||||
project.setFundingtree(
|
||||
m.getFundingtreeList().stream()
|
||||
.map(f -> mapStringField(f))
|
||||
.collect(Collectors.toList()));
|
||||
m.getFundingtreeList().stream().map(f -> mapStringField(f)).collect(Collectors.toList()));
|
||||
project.setJsonextrainfo(mapStringField(m.getJsonextrainfo()));
|
||||
project.setSummary(mapStringField(m.getSummary()));
|
||||
project.setOptional1(mapStringField(m.getOptional1()));
|
||||
|
@@ -278,9 +269,7 @@ public class ProtoConverter implements Serializable {
|
|||
.map(ProtoConverter::mapStringField)
|
||||
.collect(Collectors.toList()));
|
||||
otherResearchProducts.setTool(
|
||||
m.getToolList().stream()
|
||||
.map(ProtoConverter::mapStringField)
|
||||
.collect(Collectors.toList()));
|
||||
m.getToolList().stream().map(ProtoConverter::mapStringField).collect(Collectors.toList()));
|
||||
|
||||
return otherResearchProducts;
|
||||
}
|
||||
|
@@ -326,9 +315,7 @@ public class ProtoConverter implements Serializable {
|
|||
entity.setId(e.getId());
|
||||
entity.setOriginalId(e.getOriginalIdList());
|
||||
entity.setCollectedfrom(
|
||||
e.getCollectedfromList().stream()
|
||||
.map(ProtoConverter::mapKV)
|
||||
.collect(Collectors.toList()));
|
||||
e.getCollectedfromList().stream().map(ProtoConverter::mapKV).collect(Collectors.toList()));
|
||||
entity.setPid(
|
||||
e.getPidList().stream()
|
||||
.map(ProtoConverter::mapStructuredProperty)
|
||||
|
@@ -346,9 +333,7 @@ public class ProtoConverter implements Serializable {
|
|||
// setting Entity fields
|
||||
final ResultProtos.Result.Metadata m = oaf.getEntity().getResult().getMetadata();
|
||||
entity.setAuthor(
|
||||
m.getAuthorList().stream()
|
||||
.map(ProtoConverter::mapAuthor)
|
||||
.collect(Collectors.toList()));
|
||||
m.getAuthorList().stream().map(ProtoConverter::mapAuthor).collect(Collectors.toList()));
|
||||
entity.setResulttype(mapQualifier(m.getResulttype()));
|
||||
entity.setLanguage(mapQualifier(m.getLanguage()));
|
||||
entity.setCountry(
|
||||
|
@@ -396,12 +381,9 @@ public class ProtoConverter implements Serializable {
|
|||
.map(ProtoConverter::mapStringField)
|
||||
.collect(Collectors.toList()));
|
||||
entity.setContext(
|
||||
m.getContextList().stream()
|
||||
.map(ProtoConverter::mapContext)
|
||||
.collect(Collectors.toList()));
|
||||
m.getContextList().stream().map(ProtoConverter::mapContext).collect(Collectors.toList()));
|
||||
|
||||
entity.setBestaccessright(
|
||||
getBestAccessRights(oaf.getEntity().getResult().getInstanceList()));
|
||||
entity.setBestaccessright(getBestAccessRights(oaf.getEntity().getResult().getInstanceList()));
|
||||
|
||||
return entity;
|
||||
}
|
||||
|
|
|
@@ -68,20 +68,17 @@ public class TransformActions implements Serializable {
|
|||
SparkConf conf = new SparkConf();
|
||||
|
||||
runWithSparkSession(
|
||||
conf,
|
||||
isSparkSessionManaged,
|
||||
spark -> transformActions(inputPaths, targetBaseDir, spark));
|
||||
conf, isSparkSessionManaged, spark -> transformActions(inputPaths, targetBaseDir, spark));
|
||||
}
|
||||
|
||||
private static void transformActions(
|
||||
String inputPaths, String targetBaseDir, SparkSession spark) throws IOException {
|
||||
private static void transformActions(String inputPaths, String targetBaseDir, SparkSession spark)
|
||||
throws IOException {
|
||||
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
||||
final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration());
|
||||
|
||||
for (String sourcePath : Lists.newArrayList(Splitter.on(",").split(inputPaths))) {
|
||||
|
||||
LinkedList<String> pathQ =
|
||||
Lists.newLinkedList(Splitter.on(SEPARATOR).split(sourcePath));
|
||||
LinkedList<String> pathQ = Lists.newLinkedList(Splitter.on(SEPARATOR).split(sourcePath));
|
||||
|
||||
final String rawset = pathQ.pollLast();
|
||||
final String actionSetDirectory = pathQ.pollLast();
|
||||
|
@@ -98,17 +95,11 @@ public class TransformActions implements Serializable {
|
|||
log.info("transforming actions from '{}' to '{}'", sourcePath, targetDirectory);
|
||||
|
||||
sc.sequenceFile(sourcePath, Text.class, Text.class)
|
||||
.map(
|
||||
a ->
|
||||
eu.dnetlib.actionmanager.actions.AtomicAction.fromJSON(
|
||||
a._2().toString()))
|
||||
.map(a -> eu.dnetlib.actionmanager.actions.AtomicAction.fromJSON(a._2().toString()))
|
||||
.map(TransformActions::doTransform)
|
||||
.filter(Objects::nonNull)
|
||||
.mapToPair(
|
||||
a ->
|
||||
new Tuple2<>(
|
||||
a.getClazz().toString(),
|
||||
OBJECT_MAPPER.writeValueAsString(a)))
|
||||
a -> new Tuple2<>(a.getClazz().toString(), OBJECT_MAPPER.writeValueAsString(a)))
|
||||
.mapToPair(t -> new Tuple2(new Text(t._1()), new Text(t._2())))
|
||||
.saveAsNewAPIHadoopFile(
|
||||
targetDirectory.toString(),
|
||||
|
@@ -139,20 +130,14 @@ public class TransformActions implements Serializable {
|
|||
return new AtomicAction<>(Project.class, (Project) oaf);
|
||||
case result:
|
||||
final String resulttypeid =
|
||||
proto_oaf
|
||||
.getEntity()
|
||||
.getResult()
|
||||
.getMetadata()
|
||||
.getResulttype()
|
||||
.getClassid();
|
||||
proto_oaf.getEntity().getResult().getMetadata().getResulttype().getClassid();
|
||||
switch (resulttypeid) {
|
||||
case "publication":
|
||||
return new AtomicAction<>(Publication.class, (Publication) oaf);
|
||||
case "software":
|
||||
return new AtomicAction<>(Software.class, (Software) oaf);
|
||||
case "other":
|
||||
return new AtomicAction<>(
|
||||
OtherResearchProduct.class, (OtherResearchProduct) oaf);
|
||||
return new AtomicAction<>(OtherResearchProduct.class, (OtherResearchProduct) oaf);
|
||||
case "dataset":
|
||||
return new AtomicAction<>(Dataset.class, (Dataset) oaf);
|
||||
default:
|
||||
|
|
|
@@ -29,16 +29,13 @@ public class PartitionActionSetsByPayloadTypeJob {
|
|||
private static final StructType KV_SCHEMA =
|
||||
StructType$.MODULE$.apply(
|
||||
Arrays.asList(
|
||||
StructField$.MODULE$.apply(
|
||||
"key", DataTypes.StringType, false, Metadata.empty()),
|
||||
StructField$.MODULE$.apply(
|
||||
"value", DataTypes.StringType, false, Metadata.empty())));
|
||||
StructField$.MODULE$.apply("key", DataTypes.StringType, false, Metadata.empty()),
|
||||
StructField$.MODULE$.apply("value", DataTypes.StringType, false, Metadata.empty())));
|
||||
|
||||
private static final StructType ATOMIC_ACTION_SCHEMA =
|
||||
StructType$.MODULE$.apply(
|
||||
Arrays.asList(
|
||||
StructField$.MODULE$.apply(
|
||||
"clazz", DataTypes.StringType, false, Metadata.empty()),
|
||||
StructField$.MODULE$.apply("clazz", DataTypes.StringType, false, Metadata.empty()),
|
||||
StructField$.MODULE$.apply(
|
||||
"payload", DataTypes.StringType, false, Metadata.empty())));
|
||||
|
||||
|
@@ -101,14 +98,10 @@ public class PartitionActionSetsByPayloadTypeJob {
|
|||
private static void readAndWriteActionSetsFromPaths(
|
||||
SparkSession spark, List<String> inputActionSetPaths, String outputPath) {
|
||||
inputActionSetPaths.stream()
|
||||
.filter(
|
||||
path ->
|
||||
HdfsSupport.exists(
|
||||
path, spark.sparkContext().hadoopConfiguration()))
|
||||
.filter(path -> HdfsSupport.exists(path, spark.sparkContext().hadoopConfiguration()))
|
||||
.forEach(
|
||||
inputActionSetPath -> {
|
||||
Dataset<Row> actionDS =
|
||||
readActionSetFromPath(spark, inputActionSetPath);
|
||||
Dataset<Row> actionDS = readActionSetFromPath(spark, inputActionSetPath);
|
||||
saveActions(actionDS, outputPath);
|
||||
});
|
||||
}
|
||||
|
@@ -122,7 +115,8 @@ public class PartitionActionSetsByPayloadTypeJob {
|
|||
sc.sequenceFile(path, Text.class, Text.class)
|
||||
.map(x -> RowFactory.create(x._1().toString(), x._2().toString()));
|
||||
|
||||
return spark.createDataFrame(rdd, KV_SCHEMA)
|
||||
return spark
|
||||
.createDataFrame(rdd, KV_SCHEMA)
|
||||
.withColumn("atomic_action", from_json(col("value"), ATOMIC_ACTION_SCHEMA))
|
||||
.select(expr("atomic_action.*"));
|
||||
}
|
||||
|
|
|
@@ -128,30 +128,28 @@ public class PromoteActionPayloadForGraphTableJob {
|
|||
SparkSession spark, String path, Class<G> rowClazz) {
|
||||
logger.info("Reading graph table from path: {}", path);
|
||||
|
||||
return spark.read()
|
||||
return spark
|
||||
.read()
|
||||
.textFile(path)
|
||||
.map(
|
||||
(MapFunction<String, G>) value -> OBJECT_MAPPER.readValue(value, rowClazz),
|
||||
Encoders.bean(rowClazz));
|
||||
|
||||
/*
|
||||
return spark
|
||||
.read()
|
||||
.parquet(path)
|
||||
.as(Encoders.bean(rowClazz));
|
||||
* return spark .read() .parquet(path) .as(Encoders.bean(rowClazz));
|
||||
*/
|
||||
}
|
||||
|
||||
private static <A extends Oaf> Dataset<A> readActionPayload(
|
||||
SparkSession spark, String path, Class<A> actionPayloadClazz) {
|
||||
logger.info("Reading action payload from path: {}", path);
|
||||
return spark.read()
|
||||
return spark
|
||||
.read()
|
||||
.parquet(path)
|
||||
.map(
|
||||
(MapFunction<Row, A>)
|
||||
value ->
|
||||
OBJECT_MAPPER.readValue(
|
||||
value.<String>getAs("payload"), actionPayloadClazz),
|
||||
OBJECT_MAPPER.readValue(value.<String>getAs("payload"), actionPayloadClazz),
|
||||
Encoders.bean(actionPayloadClazz));
|
||||
}
|
||||
|
||||
|
@@ -170,8 +168,7 @@ public class PromoteActionPayloadForGraphTableJob {
|
|||
SerializableSupplier<Function<A, String>> actionPayloadIdFn = ModelSupport::idFn;
|
||||
SerializableSupplier<BiFunction<G, A, G>> mergeRowWithActionPayloadAndGetFn =
|
||||
MergeAndGet.functionFor(strategy);
|
||||
SerializableSupplier<BiFunction<G, G, G>> mergeRowsAndGetFn =
|
||||
MergeAndGet.functionFor(strategy);
|
||||
SerializableSupplier<BiFunction<G, G, G>> mergeRowsAndGetFn = MergeAndGet.functionFor(strategy);
|
||||
SerializableSupplier<G> zeroFn = zeroFn(rowClazz);
|
||||
SerializableSupplier<Function<G, Boolean>> isNotZeroFn =
|
||||
PromoteActionPayloadForGraphTableJob::isNotZeroFnUsingIdOrSource;
|
||||
|
|
|
@@ -23,8 +23,8 @@ public class PromoteActionPayloadFunctions {
|
|||
private PromoteActionPayloadFunctions() {}
|
||||
|
||||
/**
|
||||
* Joins dataset representing graph table with dataset representing action payload using
|
||||
* supplied functions.
|
||||
* Joins dataset representing graph table with dataset representing action payload using supplied
|
||||
* functions.
|
||||
*
|
||||
* @param rowDS Dataset representing graph table
|
||||
* @param actionPayloadDS Dataset representing action payload
|
||||
|
@@ -62,30 +62,21 @@ public class PromoteActionPayloadFunctions {
|
|||
.map(
|
||||
(MapFunction<Tuple2<Tuple2<String, G>, Tuple2<String, A>>, G>)
|
||||
value -> {
|
||||
Optional<G> rowOpt =
|
||||
Optional.ofNullable(value._1()).map(Tuple2::_2);
|
||||
Optional<A> actionPayloadOpt =
|
||||
Optional.ofNullable(value._2()).map(Tuple2::_2);
|
||||
return rowOpt.map(
|
||||
Optional<G> rowOpt = Optional.ofNullable(value._1()).map(Tuple2::_2);
|
||||
Optional<A> actionPayloadOpt = Optional.ofNullable(value._2()).map(Tuple2::_2);
|
||||
return rowOpt
|
||||
.map(
|
||||
row ->
|
||||
actionPayloadOpt
|
||||
.map(
|
||||
actionPayload ->
|
||||
mergeAndGetFn
|
||||
.get()
|
||||
.apply(
|
||||
row,
|
||||
actionPayload))
|
||||
mergeAndGetFn.get().apply(row, actionPayload))
|
||||
.orElse(row))
|
||||
.orElseGet(
|
||||
() ->
|
||||
actionPayloadOpt
|
||||
.filter(
|
||||
actionPayload ->
|
||||
actionPayload
|
||||
.getClass()
|
||||
.equals(
|
||||
rowClazz))
|
||||
actionPayload -> actionPayload.getClass().equals(rowClazz))
|
||||
.map(rowClazz::cast)
|
||||
.orElse(null));
|
||||
},
|
||||
|
@@ -96,8 +87,7 @@ public class PromoteActionPayloadFunctions {
|
|||
private static <T extends Oaf> Dataset<Tuple2<String, T>> mapToTupleWithId(
|
||||
Dataset<T> ds, SerializableSupplier<Function<T, String>> idFn, Class<T> clazz) {
|
||||
return ds.map(
|
||||
(MapFunction<T, Tuple2<String, T>>)
|
||||
value -> new Tuple2<>(idFn.get().apply(value), value),
|
||||
(MapFunction<T, Tuple2<String, T>>) value -> new Tuple2<>(idFn.get().apply(value), value),
|
||||
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
|
||||
}
|
||||
|
||||
|
@@ -122,8 +112,8 @@ public class PromoteActionPayloadFunctions {
|
|||
Class<G> rowClazz) {
|
||||
TypedColumn<G, G> aggregator =
|
||||
new TableAggregator<>(zeroFn, mergeAndGetFn, isNotZeroFn, rowClazz).toColumn();
|
||||
return rowDS.groupByKey(
|
||||
(MapFunction<G, String>) x -> rowIdFn.get().apply(x), Encoders.STRING())
|
||||
return rowDS
|
||||
.groupByKey((MapFunction<G, String>) x -> rowIdFn.get().apply(x), Encoders.STRING())
|
||||
.agg(aggregator)
|
||||
.map((MapFunction<Tuple2<String, G>, G>) Tuple2::_2, Encoders.kryo(rowClazz));
|
||||
}
|
||||
|
@@ -174,8 +164,7 @@ public class PromoteActionPayloadFunctions {
|
|||
} else if (!isNotZero.apply(left) && isNotZero.apply(right)) {
|
||||
return right;
|
||||
}
|
||||
throw new RuntimeException(
|
||||
"internal aggregation error: left and right objects are zero");
|
||||
throw new RuntimeException("internal aggregation error: left and right objects are zero");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -48,8 +48,7 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
|||
private static final StructType ATOMIC_ACTION_SCHEMA =
|
||||
StructType$.MODULE$.apply(
|
||||
Arrays.asList(
|
||||
StructField$.MODULE$.apply(
|
||||
"clazz", DataTypes.StringType, false, Metadata.empty()),
|
||||
StructField$.MODULE$.apply("clazz", DataTypes.StringType, false, Metadata.empty()),
|
||||
StructField$.MODULE$.apply(
|
||||
"payload", DataTypes.StringType, false, Metadata.empty())));
|
||||
|
||||
|
@@ -75,8 +74,7 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
|||
@Mock private ISClient isClient;
|
||||
|
||||
@Test
|
||||
public void shouldPartitionActionSetsByPayloadType(@TempDir Path workingDir)
|
||||
throws Exception {
|
||||
public void shouldPartitionActionSetsByPayloadType(@TempDir Path workingDir) throws Exception {
|
||||
// given
|
||||
Path inputActionSetsBaseDir = workingDir.resolve("input").resolve("action_sets");
|
||||
Path outputDir = workingDir.resolve("output");
|
||||
|
@@ -93,7 +91,9 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
|||
job.setIsClient(isClient);
|
||||
job.run(
|
||||
Boolean.FALSE,
|
||||
"", // it can be empty we're mocking the response from isClient to resolve the
|
||||
"", // it can be empty we're mocking the response from isClient
|
||||
// to
|
||||
// resolve the
|
||||
// paths
|
||||
outputDir.toString());
|
||||
|
||||
|
@@ -112,8 +112,7 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
|||
}
|
||||
}
|
||||
|
||||
private List<String> resolveInputActionSetPaths(Path inputActionSetsBaseDir)
|
||||
throws IOException {
|
||||
private List<String> resolveInputActionSetPaths(Path inputActionSetsBaseDir) throws IOException {
|
||||
Path inputActionSetJsonDumpsDir = getInputActionSetJsonDumpsDir();
|
||||
return Files.list(inputActionSetJsonDumpsDir)
|
||||
.map(
|
||||
|
@@ -132,36 +131,27 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
|||
Files.list(inputActionSetJsonDumpsDir)
|
||||
.forEach(
|
||||
inputActionSetJsonDumpFile -> {
|
||||
String inputActionSetId =
|
||||
inputActionSetJsonDumpFile.getFileName().toString();
|
||||
String inputActionSetId = inputActionSetJsonDumpFile.getFileName().toString();
|
||||
Path inputActionSetDir = inputActionSetsDir.resolve(inputActionSetId);
|
||||
|
||||
Dataset<String> actionDS =
|
||||
readActionsFromJsonDump(inputActionSetJsonDumpFile.toString())
|
||||
.cache();
|
||||
readActionsFromJsonDump(inputActionSetJsonDumpFile.toString()).cache();
|
||||
|
||||
writeActionsAsJobInput(
|
||||
actionDS, inputActionSetId, inputActionSetDir.toString());
|
||||
writeActionsAsJobInput(actionDS, inputActionSetId, inputActionSetDir.toString());
|
||||
|
||||
Map<String, List<String>> actionSetOafsByType =
|
||||
actionDS
|
||||
.withColumn(
|
||||
"atomic_action",
|
||||
from_json(col("value"), ATOMIC_ACTION_SCHEMA))
|
||||
.withColumn("atomic_action", from_json(col("value"), ATOMIC_ACTION_SCHEMA))
|
||||
.select(expr("atomic_action.*")).groupBy(col("clazz"))
|
||||
.agg(collect_list(col("payload")).as("payload_list"))
|
||||
.collectAsList().stream()
|
||||
.agg(collect_list(col("payload")).as("payload_list")).collectAsList().stream()
|
||||
.map(
|
||||
row ->
|
||||
new AbstractMap.SimpleEntry<>(
|
||||
row.<String>getAs("clazz"),
|
||||
mutableSeqAsJavaList(
|
||||
row.<Seq<String>>getAs(
|
||||
"payload_list"))))
|
||||
mutableSeqAsJavaList(row.<Seq<String>>getAs("payload_list"))))
|
||||
.collect(
|
||||
Collectors.toMap(
|
||||
AbstractMap.SimpleEntry::getKey,
|
||||
AbstractMap.SimpleEntry::getValue));
|
||||
AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue));
|
||||
|
||||
actionSetOafsByType
|
||||
.keySet()
|
||||
|
@@ -183,8 +173,7 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
|||
|
||||
private static Path getInputActionSetJsonDumpsDir() {
|
||||
return Paths.get(
|
||||
Objects.requireNonNull(
|
||||
cl.getResource("eu/dnetlib/dhp/actionmanager/partition/input/"))
|
||||
Objects.requireNonNull(cl.getResource("eu/dnetlib/dhp/actionmanager/partition/input/"))
|
||||
.getFile());
|
||||
}
|
||||
|
||||
|
@@ -194,20 +183,16 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
|||
|
||||
private static void writeActionsAsJobInput(
|
||||
Dataset<String> actionDS, String inputActionSetId, String path) {
|
||||
actionDS.javaRDD()
|
||||
actionDS
|
||||
.javaRDD()
|
||||
.mapToPair(json -> new Tuple2<>(new Text(inputActionSetId), new Text(json)))
|
||||
.saveAsNewAPIHadoopFile(
|
||||
path,
|
||||
Text.class,
|
||||
Text.class,
|
||||
SequenceFileOutputFormat.class,
|
||||
configuration);
|
||||
path, Text.class, Text.class, SequenceFileOutputFormat.class, configuration);
|
||||
}
|
||||
|
||||
private static <T extends Oaf> void assertForOafType(
|
||||
Path outputDir, Map<String, List<String>> oafsByClassName, Class<T> clazz) {
|
||||
Path outputDatasetDir =
|
||||
outputDir.resolve(String.format("clazz=%s", clazz.getCanonicalName()));
|
||||
Path outputDatasetDir = outputDir.resolve(String.format("clazz=%s", clazz.getCanonicalName()));
|
||||
Files.exists(outputDatasetDir);
|
||||
|
||||
List<T> actuals =
|
||||
|
@@ -225,13 +210,12 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
|||
|
||||
private static <T extends Oaf> Dataset<T> readActionPayloadFromJobOutput(
|
||||
String path, Class<T> clazz) {
|
||||
return spark.read()
|
||||
return spark
|
||||
.read()
|
||||
.parquet(path)
|
||||
.map(
|
||||
(MapFunction<Row, T>)
|
||||
value ->
|
||||
OBJECT_MAPPER.readValue(
|
||||
value.<String>getAs("payload"), clazz),
|
||||
value -> OBJECT_MAPPER.readValue(value.<String>getAs("payload"), clazz),
|
||||
Encoders.bean(clazz));
|
||||
}
|
||||
|
||||
|
@@ -239,7 +223,6 @@ public class PartitionActionSetsByPayloadTypeJobTest {
|
|||
return rethrowAsRuntimeException(
|
||||
() -> OBJECT_MAPPER.readValue(json, clazz),
|
||||
String.format(
|
||||
"failed to map json to class: json=%s, class=%s",
|
||||
json, clazz.getCanonicalName()));
|
||||
"failed to map json to class: json=%s, class=%s", json, clazz.getCanonicalName()));
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -23,8 +23,7 @@ public class MergeAndGetTest {
|
|||
Oaf b = mock(Oaf.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||
|
@@ -37,8 +36,7 @@ public class MergeAndGetTest {
|
|||
Relation b = mock(Relation.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||
|
@@ -51,8 +49,7 @@ public class MergeAndGetTest {
|
|||
OafEntity b = mock(OafEntity.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||
|
@@ -65,8 +62,7 @@ public class MergeAndGetTest {
|
|||
Oaf b = mock(Oaf.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||
|
@@ -79,8 +75,7 @@ public class MergeAndGetTest {
|
|||
OafEntity b = mock(OafEntity.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||
|
@@ -93,8 +88,7 @@ public class MergeAndGetTest {
|
|||
Relation b = mock(Relation.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
Oaf x = fn.get().apply(a, b);
|
||||
|
@@ -110,8 +104,7 @@ public class MergeAndGetTest {
|
|||
Oaf b = mock(Oaf.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||
|
@@ -124,8 +117,7 @@ public class MergeAndGetTest {
|
|||
Relation b = mock(Relation.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||
|
@@ -141,8 +133,7 @@ public class MergeAndGetTest {
|
|||
OafEntitySub2 b = mock(OafEntitySub2.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
assertThrows(RuntimeException.class, () -> fn.get().apply(a, b));
|
||||
|
@@ -155,8 +146,7 @@ public class MergeAndGetTest {
|
|||
OafEntity b = mock(OafEntity.class);
|
||||
|
||||
// when
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn =
|
||||
functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
SerializableSupplier<BiFunction<Oaf, Oaf, Oaf>> fn = functionFor(Strategy.MERGE_FROM_AND_GET);
|
||||
|
||||
// then
|
||||
Oaf x = fn.get().apply(a, b);
|
||||
|
|
|
@@ -53,8 +53,7 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
|||
@BeforeEach
|
||||
public void beforeEach() throws IOException {
|
||||
workingDir =
|
||||
Files.createTempDirectory(
|
||||
PromoteActionPayloadForGraphTableJobTest.class.getSimpleName());
|
||||
Files.createTempDirectory(PromoteActionPayloadForGraphTableJobTest.class.getSimpleName());
|
||||
inputDir = workingDir.resolve("input");
|
||||
inputGraphRootDir = inputDir.resolve("graph");
|
||||
inputActionPayloadRootDir = inputDir.resolve("action_payload");
|
||||
|
@@ -88,16 +87,20 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
|||
() ->
|
||||
PromoteActionPayloadForGraphTableJob.main(
|
||||
new String[] {
|
||||
"-isSparkSessionManaged", Boolean.FALSE.toString(),
|
||||
"-inputGraphTablePath", "",
|
||||
"-graphTableClassName", rowClazz.getCanonicalName(),
|
||||
"-inputActionPayloadPath", "",
|
||||
"-isSparkSessionManaged",
|
||||
Boolean.FALSE.toString(),
|
||||
"-inputGraphTablePath",
|
||||
"",
|
||||
"-graphTableClassName",
|
||||
rowClazz.getCanonicalName(),
|
||||
"-inputActionPayloadPath",
|
||||
"",
|
||||
"-actionPayloadClassName",
|
||||
actionPayloadClazz.getCanonicalName(),
|
||||
"-outputGraphTablePath", "",
|
||||
"-outputGraphTablePath",
|
||||
"",
|
||||
"-mergeAndGetStrategy",
|
||||
MergeAndGet.Strategy.SELECT_NEWER_AND_GET
|
||||
.name()
|
||||
MergeAndGet.Strategy.SELECT_NEWER_AND_GET.name()
|
||||
}));
|
||||
|
||||
// then
|
||||
|
@@ -126,33 +129,38 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
|||
// when
|
||||
PromoteActionPayloadForGraphTableJob.main(
|
||||
new String[] {
|
||||
"-isSparkSessionManaged", Boolean.FALSE.toString(),
|
||||
"-inputGraphTablePath", inputGraphTableDir.toString(),
|
||||
"-graphTableClassName", rowClazz.getCanonicalName(),
|
||||
"-inputActionPayloadPath", inputActionPayloadDir.toString(),
|
||||
"-actionPayloadClassName", actionPayloadClazz.getCanonicalName(),
|
||||
"-outputGraphTablePath", outputGraphTableDir.toString(),
|
||||
"-mergeAndGetStrategy", strategy.name()
|
||||
"-isSparkSessionManaged",
|
||||
Boolean.FALSE.toString(),
|
||||
"-inputGraphTablePath",
|
||||
inputGraphTableDir.toString(),
|
||||
"-graphTableClassName",
|
||||
rowClazz.getCanonicalName(),
|
||||
"-inputActionPayloadPath",
|
||||
inputActionPayloadDir.toString(),
|
||||
"-actionPayloadClassName",
|
||||
actionPayloadClazz.getCanonicalName(),
|
||||
"-outputGraphTablePath",
|
||||
outputGraphTableDir.toString(),
|
||||
"-mergeAndGetStrategy",
|
||||
strategy.name()
|
||||
});
|
||||
|
||||
// then
|
||||
assertTrue(Files.exists(outputGraphTableDir));
|
||||
|
||||
List<? extends Oaf> actualOutputRows =
|
||||
readGraphTableFromJobOutput(outputGraphTableDir.toString(), rowClazz)
|
||||
.collectAsList().stream()
|
||||
readGraphTableFromJobOutput(outputGraphTableDir.toString(), rowClazz).collectAsList()
|
||||
.stream()
|
||||
.sorted(Comparator.comparingInt(Object::hashCode))
|
||||
.collect(Collectors.toList());
|
||||
String expectedOutputGraphTableJsonDumpPath =
|
||||
resultFileLocation(strategy, rowClazz, actionPayloadClazz);
|
||||
Path expectedOutputGraphTableJsonDumpFile =
|
||||
Paths.get(
|
||||
Objects.requireNonNull(
|
||||
cl.getResource(expectedOutputGraphTableJsonDumpPath))
|
||||
Objects.requireNonNull(cl.getResource(expectedOutputGraphTableJsonDumpPath))
|
||||
.getFile());
|
||||
List<? extends Oaf> expectedOutputRows =
|
||||
readGraphTableFromJsonDump(
|
||||
expectedOutputGraphTableJsonDumpFile.toString(), rowClazz)
|
||||
readGraphTableFromJsonDump(expectedOutputGraphTableJsonDumpFile.toString(), rowClazz)
|
||||
.collectAsList().stream()
|
||||
.sorted(Comparator.comparingInt(Object::hashCode))
|
||||
.collect(Collectors.toList());
|
||||
|
@@ -170,42 +178,27 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
|||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
||||
eu.dnetlib.dhp.schema.oaf.Dataset.class,
|
||||
eu.dnetlib.dhp.schema.oaf.Result.class),
|
||||
arguments(
|
||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
||||
Datasource.class,
|
||||
Datasource.class),
|
||||
arguments(
|
||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
||||
Organization.class,
|
||||
Organization.class),
|
||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Datasource.class, Datasource.class),
|
||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Organization.class, Organization.class),
|
||||
arguments(
|
||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
||||
OtherResearchProduct.class,
|
||||
OtherResearchProduct.class),
|
||||
arguments(
|
||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
||||
OtherResearchProduct.class,
|
||||
Result.class),
|
||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET, OtherResearchProduct.class, Result.class),
|
||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Project.class, Project.class),
|
||||
arguments(
|
||||
MergeAndGet.Strategy.MERGE_FROM_AND_GET,
|
||||
Publication.class,
|
||||
Publication.class),
|
||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Publication.class, Publication.class),
|
||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Publication.class, Result.class),
|
||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Relation.class, Relation.class),
|
||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Software.class, Software.class),
|
||||
arguments(MergeAndGet.Strategy.MERGE_FROM_AND_GET, Software.class, Result.class));
|
||||
}
|
||||
|
||||
private static <G extends Oaf> Path createGraphTable(
|
||||
Path inputGraphRootDir, Class<G> rowClazz) {
|
||||
private static <G extends Oaf> Path createGraphTable(Path inputGraphRootDir, Class<G> rowClazz) {
|
||||
String inputGraphTableJsonDumpPath = inputGraphTableJsonDumpLocation(rowClazz);
|
||||
Path inputGraphTableJsonDumpFile =
|
||||
Paths.get(
|
||||
Objects.requireNonNull(cl.getResource(inputGraphTableJsonDumpPath))
|
||||
.getFile());
|
||||
Dataset<G> rowDS =
|
||||
readGraphTableFromJsonDump(inputGraphTableJsonDumpFile.toString(), rowClazz);
|
||||
Paths.get(Objects.requireNonNull(cl.getResource(inputGraphTableJsonDumpPath)).getFile());
|
||||
Dataset<G> rowDS = readGraphTableFromJsonDump(inputGraphTableJsonDumpFile.toString(), rowClazz);
|
||||
String inputGraphTableName = rowClazz.getSimpleName().toLowerCase();
|
||||
Path inputGraphTableDir = inputGraphRootDir.resolve(inputGraphTableName);
|
||||
writeGraphTableAaJobInput(rowDS, inputGraphTableDir.toString());
|
||||
|
@@ -215,13 +208,13 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
|||
private static String inputGraphTableJsonDumpLocation(Class<? extends Oaf> rowClazz) {
|
||||
return String.format(
|
||||
"%s/%s.json",
|
||||
"eu/dnetlib/dhp/actionmanager/promote/input/graph",
|
||||
rowClazz.getSimpleName().toLowerCase());
|
||||
"eu/dnetlib/dhp/actionmanager/promote/input/graph", rowClazz.getSimpleName().toLowerCase());
|
||||
}
|
||||
|
||||
private static <G extends Oaf> Dataset<G> readGraphTableFromJsonDump(
|
||||
String path, Class<G> rowClazz) {
|
||||
return spark.read()
|
||||
return spark
|
||||
.read()
|
||||
.textFile(path)
|
||||
.map(
|
||||
(MapFunction<String, G>) json -> OBJECT_MAPPER.readValue(json, rowClazz),
|
||||
|
@@ -237,9 +230,7 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
|||
String inputActionPayloadJsonDumpPath =
|
||||
inputActionPayloadJsonDumpLocation(rowClazz, actionPayloadClazz);
|
||||
Path inputActionPayloadJsonDumpFile =
|
||||
Paths.get(
|
||||
Objects.requireNonNull(cl.getResource(inputActionPayloadJsonDumpPath))
|
||||
.getFile());
|
||||
Paths.get(Objects.requireNonNull(cl.getResource(inputActionPayloadJsonDumpPath)).getFile());
|
||||
Dataset<String> actionPayloadDS =
|
||||
readActionPayloadFromJsonDump(inputActionPayloadJsonDumpFile.toString());
|
||||
Path inputActionPayloadDir =
|
||||
|
@@ -253,8 +244,7 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
|||
|
||||
return String.format(
|
||||
"eu/dnetlib/dhp/actionmanager/promote/input/action_payload/%s_table/%s.json",
|
||||
rowClazz.getSimpleName().toLowerCase(),
|
||||
actionPayloadClazz.getSimpleName().toLowerCase());
|
||||
rowClazz.getSimpleName().toLowerCase(), actionPayloadClazz.getSimpleName().toLowerCase());
|
||||
}
|
||||
|
||||
private static Dataset<String> readActionPayloadFromJsonDump(String path) {
|
||||
|
@@ -267,7 +257,8 @@ public class PromoteActionPayloadForGraphTableJobTest {
|
|||
|
||||
private static <G extends Oaf> Dataset<G> readGraphTableFromJobOutput(
|
||||
String path, Class<G> rowClazz) {
|
||||
return spark.read()
|
||||
return spark
|
||||
.read()
|
||||
.textFile(path)
|
||||
.map(
|
||||
(MapFunction<String, G>) json -> OBJECT_MAPPER.readValue(json, rowClazz),
|
||||
|
|
|
@@ -50,13 +50,7 @@ public class PromoteActionPayloadFunctionsTest {
|
|||
RuntimeException.class,
|
||||
() ->
|
||||
PromoteActionPayloadFunctions.joinGraphTableWithActionPayloadAndMerge(
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
OafImplSubSub.class,
|
||||
OafImpl.class));
|
||||
null, null, null, null, null, OafImplSubSub.class, OafImpl.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@@ -91,12 +85,10 @@ public class PromoteActionPayloadFunctionsTest {
|
|||
Dataset<OafImplSubSub> actionPayloadDS =
|
||||
spark.createDataset(actionPayloadData, Encoders.bean(OafImplSubSub.class));
|
||||
|
||||
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn =
|
||||
() -> OafImplRoot::getId;
|
||||
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn = () -> OafImplRoot::getId;
|
||||
SerializableSupplier<Function<OafImplSubSub, String>> actionPayloadIdFn =
|
||||
() -> OafImplRoot::getId;
|
||||
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>>
|
||||
mergeAndGetFn =
|
||||
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>> mergeAndGetFn =
|
||||
() ->
|
||||
(x, y) -> {
|
||||
x.merge(y);
|
||||
|
@@ -175,12 +167,10 @@ public class PromoteActionPayloadFunctionsTest {
|
|||
Dataset<OafImplSub> actionPayloadDS =
|
||||
spark.createDataset(actionPayloadData, Encoders.bean(OafImplSub.class));
|
||||
|
||||
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn =
|
||||
() -> OafImplRoot::getId;
|
||||
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn = () -> OafImplRoot::getId;
|
||||
SerializableSupplier<Function<OafImplSub, String>> actionPayloadIdFn =
|
||||
() -> OafImplRoot::getId;
|
||||
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSub, OafImplSubSub>>
|
||||
mergeAndGetFn =
|
||||
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSub, OafImplSubSub>> mergeAndGetFn =
|
||||
() ->
|
||||
(x, y) -> {
|
||||
x.merge(y);
|
||||
|
@@ -245,10 +235,8 @@ public class PromoteActionPayloadFunctionsTest {
|
|||
Dataset<OafImplSubSub> rowDS =
|
||||
spark.createDataset(rowData, Encoders.bean(OafImplSubSub.class));
|
||||
|
||||
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn =
|
||||
() -> OafImplRoot::getId;
|
||||
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>>
|
||||
mergeAndGetFn =
|
||||
SerializableSupplier<Function<OafImplSubSub, String>> rowIdFn = () -> OafImplRoot::getId;
|
||||
SerializableSupplier<BiFunction<OafImplSubSub, OafImplSubSub, OafImplSubSub>> mergeAndGetFn =
|
||||
() ->
|
||||
(x, y) -> {
|
||||
x.merge(y);
|
||||
|
@@ -261,12 +249,7 @@ public class PromoteActionPayloadFunctionsTest {
|
|||
// when
|
||||
List<OafImplSubSub> results =
|
||||
PromoteActionPayloadFunctions.groupGraphTableByIdAndMerge(
|
||||
rowDS,
|
||||
rowIdFn,
|
||||
mergeAndGetFn,
|
||||
zeroFn,
|
||||
isNotZeroFn,
|
||||
OafImplSubSub.class)
|
||||
rowDS, rowIdFn, mergeAndGetFn, zeroFn, isNotZeroFn, OafImplSubSub.class)
|
||||
.collectAsList();
|
||||
|
||||
// then
|
||||
|
|
|
@@ -51,8 +51,7 @@ public class GenerateNativeStoreSparkJob {
|
|||
if (invalidRecords != null) invalidRecords.add(1);
|
||||
return null;
|
||||
}
|
||||
return new MetadataRecord(
|
||||
originalIdentifier, encoding, provenance, input, dateOfCollection);
|
||||
return new MetadataRecord(originalIdentifier, encoding, provenance, input, dateOfCollection);
|
||||
} catch (Throwable e) {
|
||||
if (invalidRecords != null) invalidRecords.add(1);
|
||||
e.printStackTrace();
|
||||
|
@@ -69,8 +68,7 @@ public class GenerateNativeStoreSparkJob {
|
|||
"/eu/dnetlib/dhp/collection/collection_input_parameters.json")));
|
||||
parser.parseArgument(args);
|
||||
final ObjectMapper jsonMapper = new ObjectMapper();
|
||||
final Provenance provenance =
|
||||
jsonMapper.readValue(parser.get("provenance"), Provenance.class);
|
||||
final Provenance provenance = jsonMapper.readValue(parser.get("provenance"), Provenance.class);
|
||||
final long dateOfCollection = new Long(parser.get("dateOfCollection"));
|
||||
|
||||
final SparkSession spark =
|
||||
|
@@ -104,7 +102,8 @@ public class GenerateNativeStoreSparkJob {
|
|||
null);
|
||||
|
||||
final JavaRDD<MetadataRecord> mappeRDD =
|
||||
inputRDD.map(
|
||||
inputRDD
|
||||
.map(
|
||||
item ->
|
||||
parseRecord(
|
||||
item._2().toString(),
|
||||
|
@@ -121,10 +120,7 @@ public class GenerateNativeStoreSparkJob {
|
|||
if (!test) {
|
||||
manager.sendMessage(
|
||||
new Message(
|
||||
parser.get("workflowId"),
|
||||
"DataFrameCreation",
|
||||
MessageType.ONGOING,
|
||||
ongoingMap),
|
||||
parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap),
|
||||
parser.get("rabbitOngoingQueue"),
|
||||
true,
|
||||
false);
|
||||
|
@@ -138,10 +134,7 @@ public class GenerateNativeStoreSparkJob {
|
|||
if (!test) {
|
||||
manager.sendMessage(
|
||||
new Message(
|
||||
parser.get("workflowId"),
|
||||
"DataFrameCreation",
|
||||
MessageType.ONGOING,
|
||||
ongoingMap),
|
||||
parser.get("workflowId"), "DataFrameCreation", MessageType.ONGOING, ongoingMap),
|
||||
parser.get("rabbitOngoingQueue"),
|
||||
true,
|
||||
false);
|
||||
|
@@ -152,8 +145,7 @@ public class GenerateNativeStoreSparkJob {
|
|||
reportMap.put("mdStoreSize", "" + mdStoreRecords.value());
|
||||
if (!test) {
|
||||
manager.sendMessage(
|
||||
new Message(
|
||||
parser.get("workflowId"), "Collection", MessageType.REPORT, reportMap),
|
||||
new Message(parser.get("workflowId"), "Collection", MessageType.REPORT, reportMap),
|
||||
parser.get("rabbitReportQueue"),
|
||||
true,
|
||||
false);
|
||||
|
|
|
@@ -34,8 +34,7 @@ public class OaiCollectorPlugin implements CollectorPlugin {
|
|||
final List<String> sets = new ArrayList<>();
|
||||
if (setParam != null) {
|
||||
sets.addAll(
|
||||
Lists.newArrayList(
|
||||
Splitter.on(",").omitEmptyStrings().trimResults().split(setParam)));
|
||||
Lists.newArrayList(Splitter.on(",").omitEmptyStrings().trimResults().split(setParam)));
|
||||
}
|
||||
if (sets.isEmpty()) {
|
||||
// If no set is defined, ALL the sets must be harvested
|
||||
|
@@ -63,14 +62,11 @@ public class OaiCollectorPlugin implements CollectorPlugin {
|
|||
.map(
|
||||
set ->
|
||||
getOaiIteratorFactory()
|
||||
.newIterator(
|
||||
baseUrl, mdFormat, set, fromDate,
|
||||
untilDate))
|
||||
.newIterator(baseUrl, mdFormat, set, fromDate, untilDate))
|
||||
.iterator();
|
||||
|
||||
return StreamSupport.stream(
|
||||
Spliterators.spliteratorUnknownSize(Iterators.concat(iters), Spliterator.ORDERED),
|
||||
false);
|
||||
Spliterators.spliteratorUnknownSize(Iterators.concat(iters), Spliterator.ORDERED), false);
|
||||
}
|
||||
|
||||
public OaiIteratorFactory getOaiIteratorFactory() {
|
||||
|
|
|
@@ -19,8 +19,8 @@ import org.dom4j.io.SAXReader;
|
|||
|
||||
public class OaiIterator implements Iterator<String> {
|
||||
|
||||
private static final Log log =
|
||||
LogFactory.getLog(OaiIterator.class); // NOPMD by marko on 11/24/08 5:02 PM
|
||||
private static final Log log = LogFactory.getLog(OaiIterator.class); // NOPMD by marko on
|
||||
// 11/24/08 5:02 PM
|
||||
|
||||
private final Queue<String> queue = new PriorityBlockingQueue<>();
|
||||
private final SAXReader reader = new SAXReader();
|
||||
|
@@ -91,9 +91,7 @@ public class OaiIterator implements Iterator<String> {
|
|||
private String firstPage() throws DnetCollectorException {
|
||||
try {
|
||||
String url =
|
||||
baseUrl
|
||||
+ "?verb=ListRecords&metadataPrefix="
|
||||
+ URLEncoder.encode(mdFormat, "UTF-8");
|
||||
baseUrl + "?verb=ListRecords&metadataPrefix=" + URLEncoder.encode(mdFormat, "UTF-8");
|
||||
if (set != null && !set.isEmpty()) {
|
||||
url += "&set=" + URLEncoder.encode(set, "UTF-8");
|
||||
}
|
||||
|
@@ -150,8 +148,7 @@ public class OaiIterator implements Iterator<String> {
|
|||
} catch (final DocumentException e1) {
|
||||
final String resumptionToken = extractResumptionToken(xml);
|
||||
if (resumptionToken == null) {
|
||||
throw new DnetCollectorException(
|
||||
"Error parsing cleaned document:" + cleaned, e1);
|
||||
throw new DnetCollectorException("Error parsing cleaned document:" + cleaned, e1);
|
||||
}
|
||||
return resumptionToken;
|
||||
}
|
||||
|
|
|
@@ -48,8 +48,7 @@ public class DnetCollectorWorker {
|
|||
final ApiDescriptor api =
|
||||
jsonMapper.readValue(argumentParser.get("apidescriptor"), ApiDescriptor.class);
|
||||
|
||||
final CollectorPlugin plugin =
|
||||
collectorPluginFactory.getPluginByProtocol(api.getProtocol());
|
||||
final CollectorPlugin plugin = collectorPluginFactory.getPluginByProtocol(api.getProtocol());
|
||||
|
||||
final String hdfsuri = argumentParser.get("namenode");
|
||||
|
||||
|
@@ -80,7 +79,8 @@ public class DnetCollectorWorker {
|
|||
SequenceFile.Writer.valueClass(Text.class))) {
|
||||
final IntWritable key = new IntWritable(counter.get());
|
||||
final Text value = new Text();
|
||||
plugin.collect(api)
|
||||
plugin
|
||||
.collect(api)
|
||||
.forEach(
|
||||
content -> {
|
||||
key.set(counter.getAndIncrement());
|
||||
|
@@ -92,13 +92,11 @@ public class DnetCollectorWorker {
|
|||
"Sending message: "
|
||||
+ manager.sendMessage(
|
||||
new Message(
|
||||
argumentParser.get(
|
||||
"workflowId"),
|
||||
argumentParser.get("workflowId"),
|
||||
"Collection",
|
||||
MessageType.ONGOING,
|
||||
ongoingMap),
|
||||
argumentParser.get(
|
||||
"rabbitOngoingQueue"),
|
||||
argumentParser.get("rabbitOngoingQueue"),
|
||||
true,
|
||||
false));
|
||||
} catch (Exception e) {
|
||||
|
@@ -115,20 +113,14 @@ public class DnetCollectorWorker {
|
|||
ongoingMap.put("ongoing", "" + counter.get());
|
||||
manager.sendMessage(
|
||||
new Message(
|
||||
argumentParser.get("workflowId"),
|
||||
"Collection",
|
||||
MessageType.ONGOING,
|
||||
ongoingMap),
|
||||
argumentParser.get("workflowId"), "Collection", MessageType.ONGOING, ongoingMap),
|
||||
argumentParser.get("rabbitOngoingQueue"),
|
||||
true,
|
||||
false);
|
||||
reportMap.put("collected", "" + counter.get());
|
||||
manager.sendMessage(
|
||||
new Message(
|
||||
argumentParser.get("workflowId"),
|
||||
"Collection",
|
||||
MessageType.REPORT,
|
||||
reportMap),
|
||||
argumentParser.get("workflowId"), "Collection", MessageType.REPORT, reportMap),
|
||||
argumentParser.get("rabbitOngoingQueue"),
|
||||
true,
|
||||
false);
|
||||
|
|
|
@@ -6,8 +6,7 @@ import eu.dnetlib.dhp.collection.worker.DnetCollectorException;
|
|||
|
||||
public class CollectorPluginFactory {
|
||||
|
||||
public CollectorPlugin getPluginByProtocol(final String protocol)
|
||||
throws DnetCollectorException {
|
||||
public CollectorPlugin getPluginByProtocol(final String protocol) throws DnetCollectorException {
|
||||
if (protocol == null) throw new DnetCollectorException("protocol cannot be null");
|
||||
switch (protocol.toLowerCase().trim()) {
|
||||
case "oai":
|
||||
|
|
|
@@ -51,15 +51,12 @@ public class HttpConnector {
|
|||
* @return the content of the downloaded resource as InputStream
|
||||
* @throws DnetCollectorException when retrying more than maxNumberOfRetry times
|
||||
*/
|
||||
public InputStream getInputSourceAsStream(final String requestUrl)
|
||||
throws DnetCollectorException {
|
||||
public InputStream getInputSourceAsStream(final String requestUrl) throws DnetCollectorException {
|
||||
return attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList());
|
||||
}
|
||||
|
||||
private String attemptDownlaodAsString(
|
||||
final String requestUrl,
|
||||
final int retryNumber,
|
||||
final CollectorPluginErrorLogList errorList)
|
||||
final String requestUrl, final int retryNumber, final CollectorPluginErrorLogList errorList)
|
||||
throws DnetCollectorException {
|
||||
try {
|
||||
final InputStream s = attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList());
|
||||
|
@@ -79,14 +76,11 @@ public class HttpConnector {
|
|||
}
|
||||
|
||||
private InputStream attemptDownload(
|
||||
final String requestUrl,
|
||||
final int retryNumber,
|
||||
final CollectorPluginErrorLogList errorList)
|
||||
final String requestUrl, final int retryNumber, final CollectorPluginErrorLogList errorList)
|
||||
throws DnetCollectorException {
|
||||
|
||||
if (retryNumber > maxNumberOfRetry) {
|
||||
throw new DnetCollectorException(
|
||||
"Max number of retries exceeded. Cause: \n " + errorList);
|
||||
throw new DnetCollectorException("Max number of retries exceeded. Cause: \n " + errorList);
|
||||
}
|
||||
|
||||
log.debug("Downloading " + requestUrl + " - try: " + retryNumber);
|
||||
|
@@ -94,8 +88,7 @@ public class HttpConnector {
|
|||
InputStream input = null;
|
||||
|
||||
try {
|
||||
final HttpURLConnection urlConn =
|
||||
(HttpURLConnection) new URL(requestUrl).openConnection();
|
||||
final HttpURLConnection urlConn = (HttpURLConnection) new URL(requestUrl).openConnection();
|
||||
urlConn.setInstanceFollowRedirects(false);
|
||||
urlConn.setReadTimeout(readTimeOut * 1000);
|
||||
urlConn.addRequestProperty("User-Agent", userAgent);
|
||||
|
@@ -105,8 +98,7 @@ public class HttpConnector {
|
|||
}
|
||||
|
||||
final int retryAfter = obtainRetryAfter(urlConn.getHeaderFields());
|
||||
if (retryAfter > 0
|
||||
&& urlConn.getResponseCode() == HttpURLConnection.HTTP_UNAVAILABLE) {
|
||||
if (retryAfter > 0 && urlConn.getResponseCode() == HttpURLConnection.HTTP_UNAVAILABLE) {
|
||||
log.warn("waiting and repeating request after " + retryAfter + " sec.");
|
||||
Thread.sleep(retryAfter * 1000);
|
||||
errorList.add("503 Service Unavailable");
|
||||
|
@@ -119,21 +111,16 @@ public class HttpConnector {
|
|||
errorList.add(
|
||||
String.format(
|
||||
"%s %s. Moved to: %s",
|
||||
urlConn.getResponseCode(),
|
||||
urlConn.getResponseMessage(),
|
||||
newUrl));
|
||||
urlConn.getResponseCode(), urlConn.getResponseMessage(), newUrl));
|
||||
urlConn.disconnect();
|
||||
return attemptDownload(newUrl, retryNumber + 1, errorList);
|
||||
} else if (urlConn.getResponseCode() != HttpURLConnection.HTTP_OK) {
|
||||
log.error(
|
||||
String.format(
|
||||
"HTTP error: %s %s",
|
||||
urlConn.getResponseCode(), urlConn.getResponseMessage()));
|
||||
"HTTP error: %s %s", urlConn.getResponseCode(), urlConn.getResponseMessage()));
|
||||
Thread.sleep(defaultDelay * 1000);
|
||||
errorList.add(
|
||||
String.format(
|
||||
"%s %s",
|
||||
urlConn.getResponseCode(), urlConn.getResponseMessage()));
|
||||
String.format("%s %s", urlConn.getResponseCode(), urlConn.getResponseMessage()));
|
||||
urlConn.disconnect();
|
||||
return attemptDownload(requestUrl, retryNumber + 1, errorList);
|
||||
} else {
|
||||
|
@@ -179,9 +166,7 @@ public class HttpConnector {
|
|||
private String obtainNewLocation(final Map<String, List<String>> headerMap)
|
||||
throws DnetCollectorException {
|
||||
for (final String key : headerMap.keySet()) {
|
||||
if (key != null
|
||||
&& key.toLowerCase().equals("location")
|
||||
&& headerMap.get(key).size() > 0) {
|
||||
if (key != null && key.toLowerCase().equals("location") && headerMap.get(key).size() > 0) {
|
||||
return headerMap.get(key).get(0);
|
||||
}
|
||||
}
|
||||
|
@@ -198,12 +183,10 @@ public class HttpConnector {
|
|||
new X509TrustManager() {
|
||||
|
||||
@Override
|
||||
public void checkClientTrusted(
|
||||
final X509Certificate[] xcs, final String string) {}
|
||||
public void checkClientTrusted(final X509Certificate[] xcs, final String string) {}
|
||||
|
||||
@Override
|
||||
public void checkServerTrusted(
|
||||
final X509Certificate[] xcs, final String string) {}
|
||||
public void checkServerTrusted(final X509Certificate[] xcs, final String string) {}
|
||||
|
||||
@Override
|
||||
public X509Certificate[] getAcceptedIssuers() {
|
||||
|
|
|
@@ -19,8 +19,8 @@ public class XmlCleaner {
|
|||
private static Pattern invalidControlCharPattern = Pattern.compile("&#x?1[0-9a-fA-F];");
|
||||
|
||||
/**
|
||||
* Pattern that negates the allowable XML 4 byte unicode characters. Valid are: #x9 | #xA | #xD
|
||||
* | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
|
||||
* Pattern that negates the allowable XML 4 byte unicode characters. Valid are: #x9 | #xA | #xD |
|
||||
* [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
|
||||
*/
|
||||
private static Pattern invalidCharacterPattern =
|
||||
Pattern.compile("[^\t\r\n\u0020-\uD7FF\uE000-\uFFFD]"); // $NON-NLS-1$
|
||||
|
@@ -72,14 +72,14 @@ public class XmlCleaner {
|
|||
badEntities.put("Ÿ", " "); // $NON-NLS-1$ //$NON-NLS-2$ // illegal HTML character
|
||||
// misc entities
|
||||
badEntities.put("€", "\u20AC"); // $NON-NLS-1$ //$NON-NLS-2$ // euro
|
||||
badEntities.put(
|
||||
"‘", "\u2018"); // $NON-NLS-1$ //$NON-NLS-2$ // left single quotation mark
|
||||
badEntities.put(
|
||||
"’", "\u2019"); // $NON-NLS-1$ //$NON-NLS-2$ // right single quotation mark
|
||||
badEntities.put("‘", "\u2018"); // $NON-NLS-1$ //$NON-NLS-2$ // left single quotation
|
||||
// mark
|
||||
badEntities.put("’", "\u2019"); // $NON-NLS-1$ //$NON-NLS-2$ // right single quotation
|
||||
// mark
|
||||
// Latin 1 entities
|
||||
badEntities.put(" ", "\u00A0"); // $NON-NLS-1$ //$NON-NLS-2$ // no-break space
|
||||
badEntities.put(
|
||||
"¡", "\u00A1"); // $NON-NLS-1$ //$NON-NLS-2$ // inverted exclamation mark
|
||||
badEntities.put("¡", "\u00A1"); // $NON-NLS-1$ //$NON-NLS-2$ // inverted exclamation
|
||||
// mark
|
||||
badEntities.put("¢", "\u00A2"); // $NON-NLS-1$ //$NON-NLS-2$ // cent sign
|
||||
badEntities.put("£", "\u00A3"); // $NON-NLS-1$ //$NON-NLS-2$ // pound sign
|
||||
badEntities.put("¤", "\u00A4"); // $NON-NLS-1$ //$NON-NLS-2$ // currency sign
|
||||
|
@@ -88,11 +88,10 @@ public class XmlCleaner {
|
|||
badEntities.put("§", "\u00A7"); // $NON-NLS-1$ //$NON-NLS-2$ // section sign
|
||||
badEntities.put("¨", "\u00A8"); // $NON-NLS-1$ //$NON-NLS-2$ // diaeresis
|
||||
badEntities.put("©", "\u00A9"); // $NON-NLS-1$ //$NON-NLS-2$ // copyright sign
|
||||
badEntities.put(
|
||||
"ª", "\u00AA"); // $NON-NLS-1$ //$NON-NLS-2$ // feminine ordinal indicator
|
||||
badEntities.put(
|
||||
"«",
|
||||
"\u00AB"); //$NON-NLS-1$ //$NON-NLS-2$ // left-pointing double angle quotation mark
|
||||
badEntities.put("ª", "\u00AA"); // $NON-NLS-1$ //$NON-NLS-2$ // feminine ordinal
|
||||
// indicator
|
||||
badEntities.put("«", "\u00AB"); // $NON-NLS-2$ // left-pointing double angle
|
||||
// quotation mark
|
||||
badEntities.put("¬", "\u00AC"); // $NON-NLS-1$ //$NON-NLS-2$ // not sign
|
||||
badEntities.put("­", "\u00AD"); // $NON-NLS-1$ //$NON-NLS-2$ // soft hyphen
|
||||
badEntities.put("®", "\u00AE"); // $NON-NLS-1$ //$NON-NLS-2$ // registered sign
|
||||
|
@@ -107,205 +106,198 @@ public class XmlCleaner {
|
|||
badEntities.put("·", "\u00B7"); // $NON-NLS-1$ //$NON-NLS-2$ // middle dot
|
||||
badEntities.put("¸", "\u00B8"); // $NON-NLS-1$ //$NON-NLS-2$ // cedilla
|
||||
badEntities.put("¹", "\u00B9"); // $NON-NLS-1$ //$NON-NLS-2$ // superscript one
|
||||
badEntities.put(
|
||||
"º", "\u00BA"); // $NON-NLS-1$ //$NON-NLS-2$ // masculine ordinal indicator
|
||||
badEntities.put(
|
||||
"»",
|
||||
"\u00BB"); //$NON-NLS-1$ //$NON-NLS-2$ // right-pointing double angle quotation mark
|
||||
badEntities.put(
|
||||
"¼", "\u00BC"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction one quarter
|
||||
badEntities.put(
|
||||
"½", "\u00BD"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction one half
|
||||
badEntities.put(
|
||||
"¾",
|
||||
"\u00BE"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction three quarters
|
||||
badEntities.put(
|
||||
"¿", "\u00BF"); // $NON-NLS-1$ //$NON-NLS-2$ // inverted question mark
|
||||
badEntities.put(
|
||||
"À",
|
||||
"\u00C0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with grave
|
||||
badEntities.put(
|
||||
"Á",
|
||||
"\u00C1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with acute
|
||||
badEntities.put(
|
||||
"Â",
|
||||
"\u00C2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with circumflex
|
||||
badEntities.put(
|
||||
"Ã",
|
||||
"\u00C3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with tilde
|
||||
badEntities.put(
|
||||
"Ä",
|
||||
"\u00C4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with diaeresis
|
||||
badEntities.put(
|
||||
"Å",
|
||||
"\u00C5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A with ring above
|
||||
badEntities.put(
|
||||
"Æ", "\u00C6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter AE
|
||||
badEntities.put(
|
||||
"Ç",
|
||||
"\u00C7"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter C with cedilla
|
||||
badEntities.put(
|
||||
"È",
|
||||
"\u00C8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E with grave
|
||||
badEntities.put(
|
||||
"É",
|
||||
"\u00C9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E with acute
|
||||
badEntities.put(
|
||||
"Ê",
|
||||
"\u00CA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E with circumflex
|
||||
badEntities.put(
|
||||
"Ë",
|
||||
"\u00CB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E with diaeresis
|
||||
badEntities.put(
|
||||
"Ì",
|
||||
"\u00CC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I with grave
|
||||
badEntities.put(
|
||||
"Í",
|
||||
"\u00CD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I with acute
|
||||
badEntities.put(
|
||||
"Î",
|
||||
"\u00CE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I with circumflex
|
||||
badEntities.put(
|
||||
"Ï",
|
||||
"\u00CF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I with diaeresis
|
||||
badEntities.put("º", "\u00BA"); // $NON-NLS-1$ //$NON-NLS-2$ // masculine ordinal
|
||||
// indicator
|
||||
badEntities.put("»", "\u00BB"); // $NON-NLS-2$ // right-pointing double
|
||||
// angle quotation
|
||||
// mark
|
||||
badEntities.put("¼", "\u00BC"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction one
|
||||
// quarter
|
||||
badEntities.put("½", "\u00BD"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction one
|
||||
// half
|
||||
badEntities.put("¾", "\u00BE"); // $NON-NLS-1$ //$NON-NLS-2$ // vulgar fraction three
|
||||
// quarters
|
||||
badEntities.put("¿", "\u00BF"); // $NON-NLS-1$ //$NON-NLS-2$ // inverted question
|
||||
// mark
|
||||
badEntities.put("À", "\u00C0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// A
|
||||
// with grave
|
||||
badEntities.put("Á", "\u00C1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// A
|
||||
// with acute
|
||||
badEntities.put("Â", "\u00C2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A
|
||||
// with circumflex
|
||||
badEntities.put("Ã", "\u00C3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// A
|
||||
// with tilde
|
||||
badEntities.put("Ä", "\u00C4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("Å", "\u00C5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter A
|
||||
// with ring above
|
||||
badEntities.put("Æ", "\u00C6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// AE
|
||||
badEntities.put("Ç", "\u00C7"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// C
|
||||
// with cedilla
|
||||
badEntities.put("È", "\u00C8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// E
|
||||
// with grave
|
||||
badEntities.put("É", "\u00C9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// E
|
||||
// with acute
|
||||
badEntities.put("Ê", "\u00CA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E
|
||||
// with circumflex
|
||||
badEntities.put("Ë", "\u00CB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter E
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("Ì", "\u00CC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// I
|
||||
// with grave
|
||||
badEntities.put("Í", "\u00CD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// I
|
||||
// with acute
|
||||
badEntities.put("Î", "\u00CE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I
|
||||
// with circumflex
|
||||
badEntities.put("Ï", "\u00CF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter I
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("Ð", "\u00D0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter ETH
|
||||
badEntities.put(
|
||||
"Ñ",
|
||||
"\u00D1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter N with tilde
|
||||
badEntities.put(
|
||||
"Ò",
|
||||
"\u00D2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with grave
|
||||
badEntities.put(
|
||||
"Ó",
|
||||
"\u00D3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with acute
|
||||
badEntities.put(
|
||||
"Ô",
|
||||
"\u00D4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with circumflex
|
||||
badEntities.put(
|
||||
"Õ",
|
||||
"\u00D5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with tilde
|
||||
badEntities.put(
|
||||
"Ö",
|
||||
"\u00D6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with diaeresis
|
||||
badEntities.put("Ñ", "\u00D1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// N
|
||||
// with tilde
|
||||
badEntities.put("Ò", "\u00D2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// O
|
||||
// with grave
|
||||
badEntities.put("Ó", "\u00D3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// O
|
||||
// with acute
|
||||
badEntities.put("Ô", "\u00D4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O
|
||||
// with circumflex
|
||||
badEntities.put("Õ", "\u00D5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// O
|
||||
// with tilde
|
||||
badEntities.put("Ö", "\u00D6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("×", "\u00D7"); // $NON-NLS-1$ //$NON-NLS-2$ // multiplication sign
|
||||
badEntities.put(
|
||||
"Ø",
|
||||
"\u00D8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter O with stroke
|
||||
badEntities.put(
|
||||
"Ù",
|
||||
"\u00D9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U with grave
|
||||
badEntities.put(
|
||||
"Ú",
|
||||
"\u00DA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U with acute
|
||||
badEntities.put(
|
||||
"Û",
|
||||
"\u00DB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U with circumflex
|
||||
badEntities.put(
|
||||
"Ü",
|
||||
"\u00DC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U with diaeresis
|
||||
badEntities.put(
|
||||
"Ý",
|
||||
"\u00DD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter Y with acute
|
||||
badEntities.put(
|
||||
"Þ", "\u00DE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter THORN
|
||||
badEntities.put(
|
||||
"ß", "\u00DF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter sharp s
|
||||
badEntities.put(
|
||||
"à",
|
||||
"\u00E0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with grave
|
||||
badEntities.put(
|
||||
"á",
|
||||
"\u00E1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with acute
|
||||
badEntities.put(
|
||||
"â",
|
||||
"\u00E2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with circumflex
|
||||
badEntities.put(
|
||||
"ã",
|
||||
"\u00E3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with tilde
|
||||
badEntities.put(
|
||||
"ä",
|
||||
"\u00E4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with diaeresis
|
||||
badEntities.put(
|
||||
"å",
|
||||
"\u00E5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a with ring above
|
||||
badEntities.put("Ø", "\u00D8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// O
|
||||
// with stroke
|
||||
badEntities.put("Ù", "\u00D9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// U
|
||||
// with grave
|
||||
badEntities.put("Ú", "\u00DA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// U
|
||||
// with acute
|
||||
badEntities.put("Û", "\u00DB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U
|
||||
// with circumflex
|
||||
badEntities.put("Ü", "\u00DC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter U
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("Ý", "\u00DD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// Y
|
||||
// with acute
|
||||
badEntities.put("Þ", "\u00DE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin capital letter
|
||||
// THORN
|
||||
badEntities.put("ß", "\u00DF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter
|
||||
// sharp s
|
||||
badEntities.put("à", "\u00E0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||
// with
|
||||
// grave
|
||||
badEntities.put("á", "\u00E1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||
// with
|
||||
// acute
|
||||
badEntities.put("â", "\u00E2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||
// with
|
||||
// circumflex
|
||||
badEntities.put("ã", "\u00E3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||
// with
|
||||
// tilde
|
||||
badEntities.put("ä", "\u00E4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("å", "\u00E5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter a
|
||||
// with
|
||||
// ring above
|
||||
badEntities.put("æ", "\u00E6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter ae
|
||||
badEntities.put(
|
||||
"ç",
|
||||
"\u00E7"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter c with cedilla
|
||||
badEntities.put(
|
||||
"è",
|
||||
"\u00E8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e with grave
|
||||
badEntities.put(
|
||||
"é",
|
||||
"\u00E9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e with acute
|
||||
badEntities.put(
|
||||
"ê",
|
||||
"\u00EA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e with circumflex
|
||||
badEntities.put(
|
||||
"ë",
|
||||
"\u00EB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e with diaeresis
|
||||
badEntities.put(
|
||||
"ì",
|
||||
"\u00EC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i with grave
|
||||
badEntities.put(
|
||||
"í",
|
||||
"\u00ED"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i with acute
|
||||
badEntities.put(
|
||||
"î",
|
||||
"\u00EE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i with circumflex
|
||||
badEntities.put(
|
||||
"ï",
|
||||
"\u00EF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i with diaeresis
|
||||
badEntities.put("ç", "\u00E7"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter c
|
||||
// with
|
||||
// cedilla
|
||||
badEntities.put("è", "\u00E8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e
|
||||
// with
|
||||
// grave
|
||||
badEntities.put("é", "\u00E9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e
|
||||
// with
|
||||
// acute
|
||||
badEntities.put("ê", "\u00EA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e
|
||||
// with
|
||||
// circumflex
|
||||
badEntities.put("ë", "\u00EB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter e
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("ì", "\u00EC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i
|
||||
// with
|
||||
// grave
|
||||
badEntities.put("í", "\u00ED"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i
|
||||
// with
|
||||
// acute
|
||||
badEntities.put("î", "\u00EE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i
|
||||
// with
|
||||
// circumflex
|
||||
badEntities.put("ï", "\u00EF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter i
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("ð", "\u00F0"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter eth
|
||||
badEntities.put(
|
||||
"ñ",
|
||||
"\u00F1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter n with tilde
|
||||
badEntities.put(
|
||||
"ò",
|
||||
"\u00F2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with grave
|
||||
badEntities.put(
|
||||
"ó",
|
||||
"\u00F3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with acute
|
||||
badEntities.put(
|
||||
"ô",
|
||||
"\u00F4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with circumflex
|
||||
badEntities.put(
|
||||
"õ",
|
||||
"\u00F5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with tilde
|
||||
badEntities.put(
|
||||
"ö",
|
||||
"\u00F6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with diaeresis
|
||||
badEntities.put("ñ", "\u00F1"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter n
|
||||
// with
|
||||
// tilde
|
||||
badEntities.put("ò", "\u00F2"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||
// with
|
||||
// grave
|
||||
badEntities.put("ó", "\u00F3"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||
// with
|
||||
// acute
|
||||
badEntities.put("ô", "\u00F4"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||
// with
|
||||
// circumflex
|
||||
badEntities.put("õ", "\u00F5"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||
// with
|
||||
// tilde
|
||||
badEntities.put("ö", "\u00F6"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("÷", "\u00F7"); // $NON-NLS-1$ //$NON-NLS-2$ // division sign
|
||||
badEntities.put(
|
||||
"ø",
|
||||
"\u00F8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o with stroke
|
||||
badEntities.put(
|
||||
"ù",
|
||||
"\u00F9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u with grave
|
||||
badEntities.put(
|
||||
"ú",
|
||||
"\u00FA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u with acute
|
||||
badEntities.put(
|
||||
"û",
|
||||
"\u00FB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u with circumflex
|
||||
badEntities.put(
|
||||
"ü",
|
||||
"\u00FC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u with diaeresis
|
||||
badEntities.put(
|
||||
"ý",
|
||||
"\u00FD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter y with acute
|
||||
badEntities.put(
|
||||
"þ", "\u00FE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter thorn
|
||||
badEntities.put(
|
||||
"ÿ",
|
||||
"\u00FF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter y with diaeresis
|
||||
badEntities.put("ø", "\u00F8"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter o
|
||||
// with
|
||||
// stroke
|
||||
badEntities.put("ù", "\u00F9"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u
|
||||
// with
|
||||
// grave
|
||||
badEntities.put("ú", "\u00FA"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u
|
||||
// with
|
||||
// acute
|
||||
badEntities.put("û", "\u00FB"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u
|
||||
// with
|
||||
// circumflex
|
||||
badEntities.put("ü", "\u00FC"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter u
|
||||
// with
|
||||
// diaeresis
|
||||
badEntities.put("ý", "\u00FD"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter y
|
||||
// with
|
||||
// acute
|
||||
badEntities.put("þ", "\u00FE"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter
|
||||
// thorn
|
||||
badEntities.put("ÿ", "\u00FF"); // $NON-NLS-1$ //$NON-NLS-2$ // latin small letter y
|
||||
// with
|
||||
// diaeresis
|
||||
}
|
||||
|
||||
/**
 * For each entity in the input that is not allowed in XML, replace the entity with its unicode
 * equivalent or remove it. For each instance of a bare {@literal &}, replace it with {@literal
 * &amp;<br/>} XML only allows 4 entities: {@literal &amp;}, {@literal &quot;}, {@literal
 * &lt;} and {@literal &gt;}.
 *
 * @param broken the string to handle entities
 * @return the string with entities appropriately fixed up
 */

@@ -335,9 +327,7 @@ public class XmlCleaner {
while (true) {
    // if we are at the end of the string then just escape the '&';
    if (i >= working.length()) {
        return working.substring(0, amp) + "&amp;" + working.substring(amp + 1); // $NON-NLS-1$
    }
    // if we have come to a ; then we have an entity
    // If it is something that xml can't handle then replace it.

@@ -351,10 +341,7 @@ public class XmlCleaner {
    // Did we end an entity without finding a closing ;
    // Then treat it as an '&' that needs to be replaced with &amp;
    if (!Character.isLetterOrDigit(c)) {
        working = working.substring(0, amp) + "&amp;" + working.substring(amp + 1); // $NON-NLS-1$
        amp = i + 4; // account for the 4 extra characters
        break;
    }

@@ -375,8 +362,8 @@ public class XmlCleaner {
}

/**
 * Replace entity with its unicode equivalent, if it is not a valid XML entity. Otherwise strip it
 * out. XML only allows 4 entities: &amp;, &quot;, &lt; and &gt;.
 *
 * @param entity the entity to be replaced
 * @return the substitution for the entity, either itself, the unicode equivalent or an empty
 */

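Side note on this hunk: the Javadoc above describes the intended behaviour (map known bad entities to their unicode characters and turn every bare '&' into '&amp;'). A minimal, self-contained sketch of that idea — not the project's actual XmlCleaner implementation, using an invented cleanEntities helper and a tiny illustrative subset of the map — could look like this:

import java.util.HashMap;
import java.util.Map;

public class EntitySketch {
    // tiny illustrative subset of a bad-entity map, analogous to badEntities above
    private static final Map<String, String> BAD = new HashMap<>();
    static {
        BAD.put("&sect;", "\u00A7");
        BAD.put("&copy;", "\u00A9");
    }

    // replace known bad entities and escape bare ampersands; XML itself only
    // accepts &amp;, &quot;, &lt; and &gt;
    static String cleanEntities(String s) {
        for (Map.Entry<String, String> e : BAD.entrySet()) {
            s = s.replace(e.getKey(), e.getValue());
        }
        // escape any '&' that does not already start a well-formed entity
        return s.replaceAll("&(?![A-Za-z0-9#]+;)", "&amp;");
    }

    public static void main(String[] args) {
        System.out.println(cleanEntities("Tom &sect; Jerry & friends &amp; co"));
    }
}
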
@@ -45,15 +45,11 @@ public class TransformFunction implements MapFunction<MetadataRecord, MetadataRe
processor.registerExtensionFunction(cleanFunction);
final XsltCompiler comp = processor.newXsltCompiler();
XsltExecutable xslt =
    comp.compile(new StreamSource(new ByteArrayInputStream(transformationRule.getBytes())));
XdmNode source =
    processor
        .newDocumentBuilder()
        .build(new StreamSource(new ByteArrayInputStream(value.getBody().getBytes())));
XsltTransformer trans = xslt.load();
trans.setInitialContextNode(source);
final StringWriter output = new StringWriter();

@@ -41,8 +41,7 @@ public class TransformSparkJobNode {
final String workflowId = parser.get("workflowId");
final String trasformationRule =
    extractXSLTFromTR(
        Objects.requireNonNull(DHPUtils.decompressString(parser.get("transformationRule"))));
final String master = parser.get("master");
final String rabbitUser = parser.get("rabbitUser");
final String rabbitPassword = parser.get("rabbitPassword");

@@ -53,10 +52,7 @@ public class TransformSparkJobNode {
parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));

final SparkSession spark =
    SparkSession.builder().appName("TransformStoreSparkJob").master(master).getOrCreate();

final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
final Dataset<MetadataRecord> mdstoreInput =

@@ -85,8 +81,7 @@ public class TransformSparkJobNode {
System.out.println(new Message(workflowId, "Transform", MessageType.REPORT, reportMap));
if (!test) {
    final MessageManager manager =
        new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false, false, null);
    manager.sendMessage(
        new Message(workflowId, "Transform", MessageType.REPORT, reportMap),
        rabbitReportQueue,

@@ -33,24 +33,34 @@ public class CollectionJobTest {
final Provenance provenance = new Provenance("pippo", "puppa", "ns_prefix");
GenerateNativeStoreSparkJob.main(
    new String[] {
        "-mt", "local",
        "-w", "wid",
        "-e", "XML",
        "-d", "" + System.currentTimeMillis(),
        "-p", new ObjectMapper().writeValueAsString(provenance),
        "-x",
        "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
        "-i",
        this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(),
        "-o", testDir.toString() + "/store",
        "-t", "true",
        "-ru", "",
        "-rp", "",
        "-rh", "",
        "-ro", "",
        "-rr", ""
    });
System.out.println(new ObjectMapper().writeValueAsString(provenance));
}

@@ -33,8 +33,7 @@ public class DnetCollectorWorkerApplicationTests {
when(argumentParser.get("workflowId")).thenReturn("sandro");
when(argumentParser.get("rabbitOngoingQueue")).thenReturn("sandro");

when(messageManager.sendMessage(any(Message.class), anyString(), anyBoolean(), anyBoolean()))
    .thenAnswer(
        a -> {
            System.out.println("sent message: " + a.getArguments()[0]);

@@ -46,9 +45,7 @@ public class DnetCollectorWorkerApplicationTests {
            System.out.println("Called");
            return true;
        });
worker = new DnetCollectorWorker(new CollectorPluginFactory(), argumentParser, messageManager);
}

@AfterEach

@@ -43,16 +43,12 @@ public class TransformationJobTest {
XsltExecutable exp =
    comp.compile(
        new StreamSource(
            this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/ext_simple.xsl")));
XdmNode source =
    proc.newDocumentBuilder()
        .build(
            new StreamSource(
                this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input.xml")));
XsltTransformer trans = exp.load();
trans.setInitialContextNode(source);
final StringWriter output = new StringWriter();

@@ -73,22 +69,33 @@ public class TransformationJobTest {
final String xslt =
    DHPUtils.compressString(
        IOUtils.toString(
            this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml")));
TransformSparkJobNode.main(
    new String[] {
        "-mt", "local",
        "-i", mdstore_input,
        "-o", mdstore_output,
        "-d", "1",
        "-w", "1",
        "-tr", xslt,
        "-t", "true",
        "-ru", "",
        "-rp", "",
        "-rh", "",
        "-ro", "",
        "-rr", ""
    });
}

@@ -109,8 +116,7 @@ public class TransformationJobTest {
public void testTransformFunction() throws Exception {
    SAXReader reader = new SAXReader();
    Document document =
        reader.read(this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml"));
    Node node = document.selectSingleNode("//CODE/*[local-name()='stylesheet']");
    final String xslt = node.asXML();
    Map<String, Vocabulary> vocabularies = new HashMap<>();

@@ -122,8 +128,7 @@ public class TransformationJobTest {
    MetadataRecord record = new MetadataRecord();
    record.setBody(
        IOUtils.toString(
            this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input.xml")));

    final MetadataRecord result = tf.call(record);
    assertNotNull(result.getBody());

@@ -135,13 +140,11 @@ public class TransformationJobTest {
public void extractTr() throws Exception {

    final String xmlTr =
        IOUtils.toString(this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml"));

    SAXReader reader = new SAXReader();
    Document document =
        reader.read(this.getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/tr.xml"));
    Node node = document.selectSingleNode("//CODE/*[local-name()='stylesheet']");

    System.out.println(node.asXML());

@@ -40,8 +40,7 @@ abstract class AbstractSparkAction implements Serializable {
throws ISLookUpException, DocumentException, IOException {

final String xquery =
    String.format("/RESOURCE_PROFILE[.//DEDUPLICATION/ACTION_SET/@id = '%s']", orchestrator);

String orchestratorProfile = isLookUpService.getResourceProfileByQuery(xquery);

@@ -42,11 +42,7 @@ public class DatePicker {
.filter(d -> inRange(d.getKey()))
.sorted(reverseOrder(comparingByValue()))
.collect(
    toMap(Map.Entry::getKey, Map.Entry::getValue, (e1, e2) -> e2, LinkedHashMap::new));

// shortcut
if (sorted.size() == 0) {

@@ -67,11 +63,7 @@ public class DatePicker {
final int max = sorted.values().iterator().next();
Optional<String> first =
    sorted.entrySet().stream()
        .filter(e -> e.getValue() == max && !endsWith(e.getKey(), DATE_DEFAULT_SUFFIX))
        .map(Map.Entry::getKey)
        .findFirst();
if (first.isPresent()) {

@@ -88,9 +80,7 @@ public class DatePicker {
return date;
} else {
    final Optional<String> first =
        accepted.stream().filter(d -> !endsWith(d, DATE_DEFAULT_SUFFIX)).findFirst();
    if (first.isPresent()) {
        date.setValue(first.get());
        return date;

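For context on the DatePicker hunks above: the stream keeps the most frequent date that does not end with the default suffix. A standalone sketch of that selection — with made-up sample values, plain String.endsWith, and an assumed "-01-01" default suffix — could be:

import java.util.*;
import java.util.stream.*;

public class DatePickSketch {
    public static void main(String[] args) {
        final String DATE_DEFAULT_SUFFIX = "-01-01"; // assumed placeholder suffix
        List<String> dates = Arrays.asList("2019-05-03", "2019-05-03", "2019-01-01");

        // count occurrences, then prefer the most frequent value without the default suffix
        Map<String, Long> counts =
            dates.stream().collect(Collectors.groupingBy(d -> d, Collectors.counting()));
        long max = counts.values().stream().mapToLong(Long::longValue).max().orElse(0);
        Optional<String> first =
            counts.entrySet().stream()
                .filter(e -> e.getValue() == max && !e.getKey().endsWith(DATE_DEFAULT_SUFFIX))
                .map(Map.Entry::getKey)
                .findFirst();
        System.out.println(first.orElse("no candidate")); // prints 2019-05-03
    }
}
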
@@ -34,7 +34,8 @@ public class DedupRecordFactory {
// <id, json_entity>
Dataset<Tuple2<String, T>> entities =
    spark
        .read()
        .textFile(entitiesInputPath)
        .map(
            (MapFunction<String, Tuple2<String, T>>)

@@ -46,7 +47,8 @@ public class DedupRecordFactory {
// <source, target>: source is the dedup_id, target is the id of the mergedIn
Dataset<Tuple2<String, String>> mergeRels =
    spark
        .read()
        .load(mergeRelsInputPath)
        .as(Encoders.bean(Relation.class))
        .where("relClass == 'merges'")

@@ -58,14 +60,11 @@ public class DedupRecordFactory {
return mergeRels
    .joinWith(entities, mergeRels.col("_2").equalTo(entities.col("_1")), "inner")
    .map(
        (MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, T>>, Tuple2<String, T>>)
            value -> new Tuple2<>(value._1()._1(), value._2()._2()),
        Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)))
    .groupByKey(
        (MapFunction<Tuple2<String, T>, String>) entity -> entity._1(), Encoders.STRING())
    .mapGroups(
        (MapGroupsFunction<String, Tuple2<String, T>, T>)
            (key, values) -> entityMerger(key, values, ts, dataInfo),

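The inline comments in this hunk describe the shape of the two inputs being joined: entities keyed by id, and merge relations as <source, target> pairs where source is the dedup_id and target is the merged entity's id. A plain-Java sketch of that grouping idea, with invented toy values rather than the project's Spark datasets, might be:

import java.util.*;

public class MergeRelSketch {
    public static void main(String[] args) {
        // <id, json_entity>: entities keyed by their own identifier
        Map<String, String> entities = new HashMap<>();
        entities.put("idA", "{\"title\":\"A\"}");
        entities.put("idB", "{\"title\":\"B\"}");

        // <source, target>: source is the dedup_id, target is the id of the mergedIn entity
        List<String[]> mergeRels = Arrays.asList(
            new String[] {"dedup_1", "idA"},
            new String[] {"dedup_1", "idB"});

        // group the merged entities under their dedup_id, mirroring the joinWith/groupByKey above
        Map<String, List<String>> grouped = new HashMap<>();
        for (String[] rel : mergeRels) {
            String entity = entities.get(rel[1]);
            if (entity != null) {
                grouped.computeIfAbsent(rel[0], k -> new ArrayList<>()).add(entity);
            }
        }
        System.out.println(grouped); // {dedup_1=[{"title":"A"}, {"title":"B"}]}
    }
}
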
@@ -35,14 +35,12 @@ public class DedupUtility {
Map<String, LongAccumulator> accumulators = new HashMap<>();

String acc1 =
    String.format("%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1");
accumulators.put(acc1, context.longAccumulator(acc1));
String acc2 =
    String.format(
        "%s::%s",
        dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField());
accumulators.put(acc2, context.longAccumulator(acc2));
String acc3 =
    String.format(

@@ -50,8 +48,7 @@ public class DedupUtility {
        dedupConf.getWf().getEntityType(),
        String.format(
            "Skipped records for count(%s) >= %s",
            dedupConf.getWf().getOrderField(), dedupConf.getWf().getGroupMaxSize()));
accumulators.put(acc3, context.longAccumulator(acc3));
String acc4 = String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list");
accumulators.put(acc4, context.longAccumulator(acc4));

@@ -60,9 +57,7 @@ public class DedupUtility {
accumulators.put(acc5, context.longAccumulator(acc5));
String acc6 =
    String.format(
        "%s::%s", dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold());
accumulators.put(acc6, context.longAccumulator(acc6));

return accumulators;

@@ -106,10 +101,7 @@ public class DedupUtility {
final Map<String, Author> basePidAuthorMap =
    base.stream()
        .filter(a -> a.getPid() != null && a.getPid().size() > 0)
        .flatMap(a -> a.getPid().stream().map(p -> new Tuple2<>(p.toComparableString(), a)))
        .collect(Collectors.toMap(Tuple2::_1, Tuple2::_2, (x1, x2) -> x1));

final List<Tuple2<StructuredProperty, Author>> pidToEnrich =

@@ -118,10 +110,7 @@ public class DedupUtility {
        .flatMap(
            a ->
                a.getPid().stream()
                    .filter(p -> !basePidAuthorMap.containsKey(p.toComparableString()))
                    .map(p -> new Tuple2<>(p, a)))
        .collect(Collectors.toList());

@@ -167,14 +156,13 @@ public class DedupUtility {
        .score(normalize(pa.getSurnameString()), normalize(pb.getSurnameString()));
} else {
    return new JaroWinkler()
        .score(normalize(pa.getNormalisedFullname()), normalize(pb.getNormalisedFullname()));
}
}

private static String normalize(final String s) {
    return nfd(s)
        .toLowerCase()
        // do not compact the regexes in a single expression, would cause StackOverflowError
        // in case of large input strings

@@ -219,8 +207,7 @@ public class DedupUtility {
final ISLookUpService isLookUpService = ISLookupClientFactory.getLookUpService(isLookUpUrl);

final String xquery =
    String.format("/RESOURCE_PROFILE[.//DEDUPLICATION/ACTION_SET/@id = '%s']", orchestrator);

String orchestratorProfile = isLookUpService.getResourceProfileByQuery(xquery);

@@ -19,7 +19,8 @@ public class Deduper implements Serializable {
Map<String, LongAccumulator> accumulators =
    DedupUtility.constructAccumulator(config, context.sc());

return blocks
    .flatMapToPair(
        it -> {
            final SparkReporter reporter = new SparkReporter(accumulators);
            new BlockProcessor(config)

@@ -73,8 +73,7 @@ public class SparkCreateDedupRecord extends AbstractSparkAction {
    DedupUtility.createMergeRelPath(workingPath, actionSetId, subEntity);
final String entityPath = DedupUtility.createEntityPath(graphBasePath, subEntity);

final Class<OafEntity> clazz = ModelSupport.entityTypes.get(EntityType.valueOf(subEntity));
final DataInfo dataInfo = getDataInfo(dedupConf);
DedupRecordFactory.createDedupRecord(spark, dataInfo, mergeRelPath, entityPath, clazz)
    .write()

@@ -91,27 +91,16 @@ public class SparkCreateMergeRels extends AbstractSparkAction {

final JavaPairRDD<Object, String> vertexes =
    sc.textFile(graphBasePath + "/" + subEntity)
        .map(s -> MapDocumentUtil.getJPathString(dedupConf.getWf().getIdPath(), s))
        .mapToPair((PairFunction<String, Object, String>) s -> new Tuple2<>(hash(s), s));

final RDD<Edge<String>> edgeRdd =
    spark
        .read()
        .load(DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity))
        .as(Encoders.bean(Relation.class))
        .javaRDD()
        .map(it -> new Edge<>(hash(it.getSource()), hash(it.getTarget()), it.getRelClass()))
        .rdd();

final Dataset<Relation> mergeRels =

@@ -46,9 +46,7 @@ public class SparkCreateSimRels extends AbstractSparkAction {
SparkConf conf = new SparkConf();
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
conf.registerKryoClasses(
    new Class[] {MapDocument.class, FieldListImpl.class, FieldValueImpl.class, Block.class});

new SparkCreateSimRels(parser, getSparkSession(conf))
    .run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));

@@ -76,8 +74,7 @@ public class SparkCreateSimRels extends AbstractSparkAction {
final String subEntity = dedupConf.getWf().getSubEntityValue();
log.info("Creating simrels for: '{}'", subEntity);

final String outputPath = DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity);
removeOutputDir(spark, outputPath);

JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

@@ -87,9 +84,7 @@ public class SparkCreateSimRels extends AbstractSparkAction {
.mapToPair(
    (PairFunction<String, String, MapDocument>)
        s -> {
            MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
            return new Tuple2<>(d.getIdentifier(), d);
        });

@@ -102,7 +97,8 @@ public class SparkCreateSimRels extends AbstractSparkAction {
.map(t -> createSimRel(t._1(), t._2(), entity));

// save the simrel in the workingdir
spark
    .createDataset(relations.rdd(), Encoders.bean(Relation.class))
    .write()
    .mode(SaveMode.Append)
    .save(outputPath);

@@ -62,7 +62,8 @@ public class SparkPropagateRelation extends AbstractSparkAction {
removeOutputDir(spark, outputRelationPath);

Dataset<Relation> mergeRels =
    spark
        .read()
        .load(DedupUtility.createMergeRelPath(workingPath, "*", "*"))
        .as(Encoders.bean(Relation.class));

@@ -80,17 +81,11 @@ public class SparkPropagateRelation extends AbstractSparkAction {
final String relationPath = DedupUtility.createEntityPath(graphBasePath, "relation");

Dataset<Relation> rels =
    spark.read().textFile(relationPath).map(patchRelFn(), Encoders.bean(Relation.class));

Dataset<Relation> newRels =
    processDataset(
        processDataset(rels, mergedIds, FieldType.SOURCE, getFixRelFn(FieldType.SOURCE)),
        mergedIds,
        FieldType.TARGET,
        getFixRelFn(FieldType.TARGET))

@@ -113,11 +108,10 @@ public class SparkPropagateRelation extends AbstractSparkAction {
MapFunction<Tuple2<Tuple2<String, Relation>, Tuple2<String, String>>, Relation> mapFn) {
final Dataset<Tuple2<String, Relation>> mapped =
    rels.map(
        (MapFunction<Relation, Tuple2<String, Relation>>) r -> new Tuple2<>(getId(r, type), r),
        Encoders.tuple(Encoders.STRING(), Encoders.kryo(Relation.class)));
return mapped
    .joinWith(mergedIds, mapped.col("_1").equalTo(mergedIds.col("_1")), "left_outer")
    .map(mapFn, Encoders.bean(Relation.class));
}

@@ -74,16 +74,14 @@ public class SparkUpdateEntity extends AbstractSparkAction {
removeOutputDir(spark, outputPath);

JavaRDD<String> sourceEntity =
    sc.textFile(DedupUtility.createEntityPath(graphBasePath, type.toString()));

if (mergeRelExists(workingPath, type.toString())) {

    final String mergeRelPath =
        DedupUtility.createMergeRelPath(workingPath, "*", type.toString());
    final String dedupRecordPath =
        DedupUtility.createDedupRecordPath(workingPath, "*", type.toString());

    final Dataset<Relation> rel =
        spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));

@@ -94,25 +92,19 @@ public class SparkUpdateEntity extends AbstractSparkAction {
        .distinct()
        .toJavaRDD()
        .mapToPair(
            (PairFunction<Row, String, String>) r -> new Tuple2<>(r.getString(0), "d"));

    JavaPairRDD<String, String> entitiesWithId =
        sourceEntity.mapToPair(
            (PairFunction<String, String, String>)
                s -> new Tuple2<>(MapDocumentUtil.getJPathString(IDJSONPATH, s), s));
    JavaRDD<String> map =
        entitiesWithId
            .leftOuterJoin(mergedIds)
            .map(
                k ->
                    k._2()._2().isPresent()
                        ? updateDeletedByInference(k._2()._1(), clazz)
                        : k._2()._1());

    sourceEntity = map.union(sc.textFile(dedupRecordPath));

@@ -133,9 +125,7 @@ public class SparkUpdateEntity extends AbstractSparkAction {
for (FileStatus fs : fileStatuses) {
    if (fs.isDirectory())
        if (fileSystem.exists(
            new Path(DedupUtility.createMergeRelPath(basePath, fs.getPath().getName(), entity))))
            result = true;
}

@@ -37,9 +37,7 @@ public class Block implements Serializable {
block.setDocuments(
    StreamSupport.stream(it.spliterator(), false)
        .flatMap(b -> b.getDocuments().stream())
        .sorted(Comparator.comparing(a -> a.getFieldMap().get(orderField).stringValue()))
        .limit(maxSize)
        .collect(Collectors.toCollection(ArrayList::new)));
return block;

@@ -50,9 +48,7 @@ public class Block implements Serializable {
block.setKey(b1.getKey());
block.setDocuments(
    Stream.concat(b1.getDocuments().stream(), b2.getDocuments().stream())
        .sorted(Comparator.comparing(a -> a.getFieldMap().get(orderField).stringValue()))
        .limit(maxSize)
        .collect(Collectors.toCollection(ArrayList::new)));

@@ -18,9 +18,7 @@ public class MergeAuthorTest {
public void setUp() throws Exception {
    final String json =
        IOUtils.toString(
            this.getClass().getResourceAsStream("/eu/dnetlib/dhp/dedup/json/authors_merge.json"));

    publicationsToMerge =
        Arrays.asList(json.split("\n")).stream()

@@ -51,20 +51,13 @@ public class SparkDedupTest implements Serializable {
public static void cleanUp() throws IOException, URISyntaxException {

    testGraphBasePath =
        Paths.get(SparkDedupTest.class.getResource("/eu/dnetlib/dhp/dedup/entities").toURI())
            .toFile()
            .getAbsolutePath();
    testOutputBasePath =
        createTempDirectory(SparkDedupTest.class.getSimpleName() + "-").toAbsolutePath().toString();
    testDedupGraphBasePath =
        createTempDirectory(SparkDedupTest.class.getSimpleName() + "-").toAbsolutePath().toString();

    FileUtils.deleteDirectory(new File(testOutputBasePath));
    FileUtils.deleteDirectory(new File(testDedupGraphBasePath));

@@ -118,9 +111,7 @@ public class SparkDedupTest implements Serializable {
    "/eu/dnetlib/dhp/dedup/conf/ds.curr.conf.json")));

lenient()
    .when(isLookUpService.getResourceProfileByQuery(Mockito.contains("otherresearchproduct")))
    .thenReturn(
        IOUtils.toString(
            SparkDedupTest.class.getResourceAsStream(

@@ -138,39 +129,38 @@ public class SparkDedupTest implements Serializable {
    "/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
parser.parseArgument(
    new String[] {
        "-i", testGraphBasePath,
        "-asi", testActionSetId,
        "-la", "lookupurl",
        "-w", testOutputBasePath
    });

new SparkCreateSimRels(parser, spark).run(isLookUpService);

long orgs_simrel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/organization_simrel").count();
long pubs_simrel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/publication_simrel").count();
long sw_simrel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/software_simrel").count();
long ds_simrel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/dataset_simrel").count();
long orp_simrel =
    spark
        .read()
        .load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_simrel")
        .count();

assertEquals(3432, orgs_simrel);

@@ -191,39 +181,41 @@ public class SparkDedupTest implements Serializable {
    "/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
parser.parseArgument(
    new String[] {
        "-i", testGraphBasePath,
        "-asi", testActionSetId,
        "-la", "lookupurl",
        "-w", testOutputBasePath
    });

new SparkCreateMergeRels(parser, spark).run(isLookUpService);

long orgs_mergerel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel").count();
long pubs_mergerel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel").count();
long sw_mergerel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel").count();
long ds_mergerel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel").count();
long orp_mergerel =
    spark
        .read()
        .load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_mergerel")
        .count();

assertEquals(1276, orgs_mergerel);

@@ -244,40 +236,31 @@ public class SparkDedupTest implements Serializable {
    "/eu/dnetlib/dhp/oa/dedup/createDedupRecord_parameters.json")));
parser.parseArgument(
    new String[] {
        "-i", testGraphBasePath,
        "-asi", testActionSetId,
        "-la", "lookupurl",
        "-w", testOutputBasePath
    });

new SparkCreateDedupRecord(parser, spark).run(isLookUpService);

long orgs_deduprecord = jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/organization_deduprecord").count();
long pubs_deduprecord = jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/publication_deduprecord").count();
long sw_deduprecord = jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/software_deduprecord").count();
long ds_deduprecord = jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/dataset_deduprecord").count();
long orp_deduprecord =
    jsc.textFile(
            testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_deduprecord")
        .count();

assertEquals(82, orgs_deduprecord);

@@ -298,9 +281,7 @@ public class SparkDedupTest implements Serializable {
    "/eu/dnetlib/dhp/oa/dedup/updateEntity_parameters.json")));
parser.parseArgument(
    new String[] {
        "-i", testGraphBasePath, "-w", testOutputBasePath, "-o", testDedupGraphBasePath
    });

new SparkUpdateEntity(parser, spark).run(isLookUpService);

@@ -315,7 +296,8 @@ public class SparkDedupTest implements Serializable {
jsc.textFile(testDedupGraphBasePath + "/otherresearchproduct").count();

long mergedOrgs = spark.read()
    .load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
    .as(Encoders.bean(Relation.class))
    .where("relClass=='merges'")

@@ -325,7 +307,8 @@ public class SparkDedupTest implements Serializable {
    .count();

long mergedPubs = spark.read()
    .load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
    .as(Encoders.bean(Relation.class))
    .where("relClass=='merges'")

@@ -335,7 +318,8 @@ public class SparkDedupTest implements Serializable {
    .count();

long mergedSw = spark.read()
    .load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
    .as(Encoders.bean(Relation.class))
    .where("relClass=='merges'")

@@ -345,7 +329,8 @@ public class SparkDedupTest implements Serializable {
    .count();

long mergedDs = spark.read()
    .load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel")
    .as(Encoders.bean(Relation.class))
    .where("relClass=='merges'")

@@ -355,12 +340,9 @@ public class SparkDedupTest implements Serializable {
    .count();

long mergedOrp = spark.read()
    .load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_mergerel")
    .as(Encoders.bean(Relation.class))
    .where("relClass=='merges'")
    .javaRDD()

@@ -419,9 +401,7 @@ public class SparkDedupTest implements Serializable {
    "/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
parser.parseArgument(
    new String[] {
        "-i", testGraphBasePath, "-w", testOutputBasePath, "-o", testDedupGraphBasePath
    });

new SparkPropagateRelation(parser, spark).run(isLookUpService);

@@ -432,7 +412,8 @@ public class SparkDedupTest implements Serializable {

// check deletedbyinference
final Dataset<Relation> mergeRels =
    spark
        .read()
        .load(DedupUtility.createMergeRelPath(testOutputBasePath, "*", "*"))
        .as(Encoders.bean(Relation.class));
final JavaPairRDD<String, String> mergedIds =

@@ -447,18 +428,10 @@ public class SparkDedupTest implements Serializable {

JavaRDD<String> toCheck =
    jsc.textFile(testDedupGraphBasePath + "/relation")
        .mapToPair(json -> new Tuple2<>(MapDocumentUtil.getJPathString("$.source", json), json))
        .join(mergedIds)
        .map(t -> t._2()._1())
        .mapToPair(json -> new Tuple2<>(MapDocumentUtil.getJPathString("$.target", json), json))
        .join(mergedIds)
        .map(t -> t._2()._1());

@@ -42,11 +42,7 @@ public class DatePicker {
.filter(d -> inRange(d.getKey()))
.sorted(reverseOrder(comparingByValue()))
.collect(
    toMap(Map.Entry::getKey, Map.Entry::getValue, (e1, e2) -> e2, LinkedHashMap::new));

// shortcut
if (sorted.size() == 0) {

@@ -67,11 +63,7 @@ public class DatePicker {
final int max = sorted.values().iterator().next();
Optional<String> first =
    sorted.entrySet().stream()
        .filter(e -> e.getValue() == max && !endsWith(e.getKey(), DATE_DEFAULT_SUFFIX))
        .map(Map.Entry::getKey)
        .findFirst();
if (first.isPresent()) {

@@ -88,9 +80,7 @@ public class DatePicker {
return date;
} else {
    final Optional<String> first =
        accepted.stream().filter(d -> !endsWith(d, DATE_DEFAULT_SUFFIX)).findFirst();
    if (first.isPresent()) {
        date.setValue(first.get());
        return date;

@@ -32,33 +32,26 @@ public class DedupRecordFactory {
    (PairFunction<String, String, String>)
        it ->
            new Tuple2<String, String>(
                MapDocumentUtil.getJPathString(dedupConf.getWf().getIdPath(), it), it));

// <source, target>: source is the dedup_id, target is the id of the mergedIn
JavaPairRDD<String, String> mergeRels =
    spark
        .read()
        .load(mergeRelsInputPath)
        .as(Encoders.bean(Relation.class))
        .where("relClass=='merges'")
        .javaRDD()
        .mapToPair(
            (PairFunction<Relation, String, String>)
                r -> new Tuple2<String, String>(r.getTarget(), r.getSource()));

// <dedup_id, json_entity_merged>
final JavaPairRDD<String, String> joinResult =
    mergeRels
        .join(inputJsonEntities)
        .mapToPair(
            (PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>) Tuple2::_2);

JavaPairRDD<String, Iterable<String>> sortedJoinResult = joinResult.groupByKey();

@@ -76,15 +69,13 @@ public class DedupRecordFactory {
case organization:
    return sortedJoinResult.map(o -> DedupRecordFactory.organizationMerger(o, ts));
case otherresearchproduct:
    return sortedJoinResult.map(o -> DedupRecordFactory.otherresearchproductMerger(o, ts));
default:
    return null;
}
}

private static Publication publicationMerger(Tuple2<String, Iterable<String>> e, final long ts) {

Publication p = new Publication(); // the result of the merge, to be returned at the end

@@ -96,20 +87,17 @@ public class DedupRecordFactory {
final Collection<String> dateofacceptance = Lists.newArrayList();

if (e._2() != null)
    e._2()
        .forEach(
            pub -> {
                try {
                    Publication publication = mapper.readValue(pub, Publication.class);

                    p.mergeFrom(publication);
                    p.setAuthor(DedupUtility.mergeAuthor(p.getAuthor(), publication.getAuthor()));
                    // add to the list if they are not null
                    if (publication.getDateofacceptance() != null)
                        dateofacceptance.add(publication.getDateofacceptance().getValue());
                } catch (Exception exc) {
                    throw new RuntimeException(exc);
                }

@@ -133,19 +121,17 @@ public class DedupRecordFactory {
final Collection<String> dateofacceptance = Lists.newArrayList();

if (e._2() != null)
    e._2()
        .forEach(
            dat -> {
                try {
                    Dataset dataset = mapper.readValue(dat, Dataset.class);

                    d.mergeFrom(dataset);
                    d.setAuthor(DedupUtility.mergeAuthor(d.getAuthor(), dataset.getAuthor()));
                    // add to the list if they are not null
                    if (dataset.getDateofacceptance() != null)
                        dateofacceptance.add(dataset.getDateofacceptance().getValue());
                } catch (Exception exc) {
                    throw new RuntimeException(exc);
                }

@@ -166,7 +152,8 @@ public class DedupRecordFactory {
final ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
if (e._2() != null)
    e._2()
        .forEach(
            proj -> {
                try {
                    Project project = mapper.readValue(proj, Project.class);

@@ -191,19 +178,17 @@ public class DedupRecordFactory {
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
final Collection<String> dateofacceptance = Lists.newArrayList();
if (e._2() != null)
    e._2()
        .forEach(
            soft -> {
                try {
                    Software software = mapper.readValue(soft, Software.class);

                    s.mergeFrom(software);
                    s.setAuthor(DedupUtility.mergeAuthor(s.getAuthor(), software.getAuthor()));
                    // add to the list if they are not null
                    if (software.getDateofacceptance() != null)
                        dateofacceptance.add(software.getDateofacceptance().getValue());
                } catch (Exception exc) {
                    throw new RuntimeException(exc);
                }

@@ -221,7 +206,8 @@ public class DedupRecordFactory {
final ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
if (e._2() != null)
    e._2()
        .forEach(
            dat -> {
                try {
                    Datasource datasource = mapper.readValue(dat, Datasource.class);

@@ -250,14 +236,13 @@ public class DedupRecordFactory {
StringBuilder trust = new StringBuilder("0.0");

if (e._2() != null)
    e._2()
        .forEach(
            pub -> {
                try {
                    Organization organization = mapper.readValue(pub, Organization.class);

                    final String currentTrust = organization.getDataInfo().getTrust();
                    if (!"1.0".equals(currentTrust)) {
                        trust.setLength(0);
                        trust.append(currentTrust);

@@ -282,8 +267,8 @@ public class DedupRecordFactory {
private static OtherResearchProduct otherresearchproductMerger(
    Tuple2<String, Iterable<String>> e, final long ts) {

    OtherResearchProduct o = new OtherResearchProduct(); // the result of the merge, to be returned at the end

    o.setId(e._1());
|
||||
|
||||
|
@ -293,7 +278,8 @@ public class DedupRecordFactory {
|
|||
final Collection<String> dateofacceptance = Lists.newArrayList();
|
||||
|
||||
if (e._2() != null)
|
||||
e._2().forEach(
|
||||
e._2()
|
||||
.forEach(
|
||||
orp -> {
|
||||
try {
|
||||
OtherResearchProduct otherResearchProduct =
|
||||
|
@ -301,15 +287,10 @@ public class DedupRecordFactory {
|
|||
|
||||
o.mergeFrom(otherResearchProduct);
|
||||
o.setAuthor(
|
||||
DedupUtility.mergeAuthor(
|
||||
o.getAuthor(),
|
||||
otherResearchProduct.getAuthor()));
|
||||
DedupUtility.mergeAuthor(o.getAuthor(), otherResearchProduct.getAuthor()));
|
||||
// add to the list if they are not null
|
||||
if (otherResearchProduct.getDateofacceptance() != null)
|
||||
dateofacceptance.add(
|
||||
otherResearchProduct
|
||||
.getDateofacceptance()
|
||||
.getValue());
|
||||
dateofacceptance.add(otherResearchProduct.getDateofacceptance().getValue());
|
||||
} catch (Exception exc) {
|
||||
throw new RuntimeException(exc);
|
||||
}
|
||||
|
|
|
@ -37,14 +37,12 @@ public class DedupUtility {
|
|||
Map<String, LongAccumulator> accumulators = new HashMap<>();
|
||||
|
||||
String acc1 =
|
||||
String.format(
|
||||
"%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1");
|
||||
String.format("%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1");
|
||||
accumulators.put(acc1, context.longAccumulator(acc1));
|
||||
String acc2 =
|
||||
String.format(
|
||||
"%s::%s",
|
||||
dedupConf.getWf().getEntityType(),
|
||||
"missing " + dedupConf.getWf().getOrderField());
|
||||
dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField());
|
||||
accumulators.put(acc2, context.longAccumulator(acc2));
|
||||
String acc3 =
|
||||
String.format(
|
||||
|
@ -52,8 +50,7 @@ public class DedupUtility {
|
|||
dedupConf.getWf().getEntityType(),
|
||||
String.format(
|
||||
"Skipped records for count(%s) >= %s",
|
||||
dedupConf.getWf().getOrderField(),
|
||||
dedupConf.getWf().getGroupMaxSize()));
|
||||
dedupConf.getWf().getOrderField(), dedupConf.getWf().getGroupMaxSize()));
|
||||
accumulators.put(acc3, context.longAccumulator(acc3));
|
||||
String acc4 = String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list");
|
||||
accumulators.put(acc4, context.longAccumulator(acc4));
|
||||
|
@ -62,9 +59,7 @@ public class DedupUtility {
|
|||
accumulators.put(acc5, context.longAccumulator(acc5));
|
||||
String acc6 =
|
||||
String.format(
|
||||
"%s::%s",
|
||||
dedupConf.getWf().getEntityType(),
|
||||
"d < " + dedupConf.getWf().getThreshold());
|
||||
"%s::%s", dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold());
|
||||
accumulators.put(acc6, context.longAccumulator(acc6));
|
||||
|
||||
return accumulators;
|
||||
|
@ -139,10 +134,7 @@ public class DedupUtility {
|
|||
final Map<String, Author> basePidAuthorMap =
|
||||
base.stream()
|
||||
.filter(a -> a.getPid() != null && a.getPid().size() > 0)
|
||||
.flatMap(
|
||||
a ->
|
||||
a.getPid().stream()
|
||||
.map(p -> new Tuple2<>(p.toComparableString(), a)))
|
||||
.flatMap(a -> a.getPid().stream().map(p -> new Tuple2<>(p.toComparableString(), a)))
|
||||
.collect(Collectors.toMap(Tuple2::_1, Tuple2::_2, (x1, x2) -> x1));
|
||||
|
||||
final List<Tuple2<StructuredProperty, Author>> pidToEnrich =
|
||||
|
@ -151,10 +143,7 @@ public class DedupUtility {
|
|||
.flatMap(
|
||||
a ->
|
||||
a.getPid().stream()
|
||||
.filter(
|
||||
p ->
|
||||
!basePidAuthorMap.containsKey(
|
||||
p.toComparableString()))
|
||||
.filter(p -> !basePidAuthorMap.containsKey(p.toComparableString()))
|
||||
.map(p -> new Tuple2<>(p, a)))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
|
@ -193,14 +182,13 @@ public class DedupUtility {
|
|||
.score(normalize(pa.getSurnameString()), normalize(pb.getSurnameString()));
|
||||
} else {
|
||||
return new JaroWinkler()
|
||||
.score(
|
||||
normalize(pa.getNormalisedFullname()),
|
||||
normalize(pb.getNormalisedFullname()));
|
||||
.score(normalize(pa.getNormalisedFullname()), normalize(pb.getNormalisedFullname()));
|
||||
}
|
||||
}
|
||||
|
||||
private static String normalize(final String s) {
|
||||
return nfd(s).toLowerCase()
|
||||
return nfd(s)
|
||||
.toLowerCase()
|
||||
// do not compact the regexes in a single expression, would cause StackOverflowError
|
||||
// in case
|
||||
// of large input strings
|
||||
|
|
|
@ -70,7 +70,8 @@ public class Deduper implements Serializable {
|
|||
Map<String, LongAccumulator> accumulators =
|
||||
DedupUtility.constructAccumulator(config, context.sc());
|
||||
|
||||
return blocks.flatMapToPair(
|
||||
return blocks
|
||||
.flatMapToPair(
|
||||
(PairFlatMapFunction<Tuple2<String, Iterable<MapDocument>>, String, String>)
|
||||
it -> {
|
||||
final SparkReporter reporter = new SparkReporter(accumulators);
|
||||
|
@ -79,13 +80,10 @@ public class Deduper implements Serializable {
|
|||
})
|
||||
.mapToPair(
|
||||
(PairFunction<Tuple2<String, String>, String, Tuple2<String, String>>)
|
||||
item ->
|
||||
new Tuple2<String, Tuple2<String, String>>(
|
||||
item._1() + item._2(), item))
|
||||
item -> new Tuple2<String, Tuple2<String, String>>(item._1() + item._2(), item))
|
||||
.reduceByKey((a, b) -> a)
|
||||
.mapToPair(
|
||||
(PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>)
|
||||
Tuple2::_2);
|
||||
(PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>) Tuple2::_2);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -95,9 +93,7 @@ public class Deduper implements Serializable {
|
|||
* @param: the dedup configuration
|
||||
*/
|
||||
public static JavaPairRDD<String, Iterable<MapDocument>> createBlocks(
|
||||
JavaSparkContext context,
|
||||
JavaPairRDD<String, MapDocument> mapDocs,
|
||||
DedupConfig config) {
|
||||
JavaSparkContext context, JavaPairRDD<String, MapDocument> mapDocs, DedupConfig config) {
|
||||
return mapDocs
|
||||
// the reduce is just to be sure that we haven't document with same id
|
||||
.reduceByKey((a, b) -> a)
|
||||
|
@ -114,9 +110,7 @@ public class Deduper implements Serializable {
|
|||
}
|
||||
|
||||
public static JavaPairRDD<String, List<MapDocument>> createsortedBlocks(
|
||||
JavaSparkContext context,
|
||||
JavaPairRDD<String, MapDocument> mapDocs,
|
||||
DedupConfig config) {
|
||||
JavaSparkContext context, JavaPairRDD<String, MapDocument> mapDocs, DedupConfig config) {
|
||||
final String of = config.getWf().getOrderField();
|
||||
final int maxQueueSize = config.getWf().getGroupMaxSize();
|
||||
return mapDocs
|
||||
|
@ -130,8 +124,7 @@ public class Deduper implements Serializable {
|
|||
DedupUtility.getGroupingKeys(config, a).stream()
|
||||
.map(
|
||||
it -> {
|
||||
List<MapDocument> tmp =
|
||||
new ArrayList<>();
|
||||
List<MapDocument> tmp = new ArrayList<>();
|
||||
tmp.add(a);
|
||||
return new Tuple2<>(it, tmp);
|
||||
})
|
||||
|
@ -141,11 +134,8 @@ public class Deduper implements Serializable {
|
|||
(Function2<List<MapDocument>, List<MapDocument>, List<MapDocument>>)
|
||||
(v1, v2) -> {
|
||||
v1.addAll(v2);
|
||||
v1.sort(
|
||||
Comparator.comparing(
|
||||
a -> a.getFieldMap().get(of).stringValue()));
|
||||
if (v1.size() > maxQueueSize)
|
||||
return new ArrayList<>(v1.subList(0, maxQueueSize));
|
||||
v1.sort(Comparator.comparing(a -> a.getFieldMap().get(of).stringValue()));
|
||||
if (v1.size() > maxQueueSize) return new ArrayList<>(v1.subList(0, maxQueueSize));
|
||||
return v1;
|
||||
});
|
||||
}
|
||||
|
@ -162,42 +152,33 @@ public class Deduper implements Serializable {
|
|||
return entities.mapToPair(
|
||||
(PairFunction<String, String, MapDocument>)
|
||||
s -> {
|
||||
MapDocument mapDocument =
|
||||
MapDocumentUtil.asMapDocumentWithJPath(config, s);
|
||||
return new Tuple2<String, MapDocument>(
|
||||
mapDocument.getIdentifier(), mapDocument);
|
||||
MapDocument mapDocument = MapDocumentUtil.asMapDocumentWithJPath(config, s);
|
||||
return new Tuple2<String, MapDocument>(mapDocument.getIdentifier(), mapDocument);
|
||||
});
|
||||
}
|
||||
|
||||
public static JavaPairRDD<String, String> computeRelations2(
|
||||
JavaSparkContext context,
|
||||
JavaPairRDD<String, List<MapDocument>> blocks,
|
||||
DedupConfig config) {
|
||||
JavaSparkContext context, JavaPairRDD<String, List<MapDocument>> blocks, DedupConfig config) {
|
||||
Map<String, LongAccumulator> accumulators =
|
||||
DedupUtility.constructAccumulator(config, context.sc());
|
||||
|
||||
return blocks.flatMapToPair(
|
||||
return blocks
|
||||
.flatMapToPair(
|
||||
(PairFlatMapFunction<Tuple2<String, List<MapDocument>>, String, String>)
|
||||
it -> {
|
||||
try {
|
||||
final SparkReporter reporter =
|
||||
new SparkReporter(accumulators);
|
||||
new BlockProcessor(config)
|
||||
.processSortedBlock(it._1(), it._2(), reporter);
|
||||
final SparkReporter reporter = new SparkReporter(accumulators);
|
||||
new BlockProcessor(config).processSortedBlock(it._1(), it._2(), reporter);
|
||||
return reporter.getRelations().iterator();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(
|
||||
it._2().get(0).getIdentifier(), e);
|
||||
throw new RuntimeException(it._2().get(0).getIdentifier(), e);
|
||||
}
|
||||
})
|
||||
.mapToPair(
|
||||
(PairFunction<Tuple2<String, String>, String, Tuple2<String, String>>)
|
||||
item ->
|
||||
new Tuple2<String, Tuple2<String, String>>(
|
||||
item._1() + item._2(), item))
|
||||
item -> new Tuple2<String, Tuple2<String, String>>(item._1() + item._2(), item))
|
||||
.reduceByKey((a, b) -> a)
|
||||
.mapToPair(
|
||||
(PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>)
|
||||
Tuple2::_2);
|
||||
(PairFunction<Tuple2<String, Tuple2<String, String>>, String, String>) Tuple2::_2);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -53,7 +53,8 @@ public class SparkCreateConnectedComponent {
|
|||
s -> new Tuple2<Object, String>(getHashcode(s), s));
|
||||
|
||||
final Dataset<Relation> similarityRelations =
|
||||
spark.read()
|
||||
spark
|
||||
.read()
|
||||
.load(DedupUtility.createSimRelPath(targetPath, entity))
|
||||
.as(Encoders.bean(Relation.class));
|
||||
final RDD<Edge<String>> edgeRdd =
|
||||
|
@ -62,13 +63,10 @@ public class SparkCreateConnectedComponent {
|
|||
.map(
|
||||
it ->
|
||||
new Edge<>(
|
||||
getHashcode(it.getSource()),
|
||||
getHashcode(it.getTarget()),
|
||||
it.getRelClass()))
|
||||
getHashcode(it.getSource()), getHashcode(it.getTarget()), it.getRelClass()))
|
||||
.rdd();
|
||||
final JavaRDD<ConnectedComponent> cc =
|
||||
GraphProcessor.findCCs(
|
||||
vertexes.rdd(), edgeRdd, dedupConf.getWf().getMaxIterations())
|
||||
GraphProcessor.findCCs(vertexes.rdd(), edgeRdd, dedupConf.getWf().getMaxIterations())
|
||||
.toJavaRDD();
|
||||
final Dataset<Relation> mergeRelation =
|
||||
spark.createDataset(
|
||||
|
@ -79,21 +77,16 @@ public class SparkCreateConnectedComponent {
|
|||
c.getDocIds().stream()
|
||||
.flatMap(
|
||||
id -> {
|
||||
List<Relation> tmp =
|
||||
new ArrayList<>();
|
||||
Relation r =
|
||||
new Relation();
|
||||
r.setSource(
|
||||
c.getCcId());
|
||||
List<Relation> tmp = new ArrayList<>();
|
||||
Relation r = new Relation();
|
||||
r.setSource(c.getCcId());
|
||||
r.setTarget(id);
|
||||
r.setRelClass("merges");
|
||||
tmp.add(r);
|
||||
r = new Relation();
|
||||
r.setTarget(
|
||||
c.getCcId());
|
||||
r.setTarget(c.getCcId());
|
||||
r.setSource(id);
|
||||
r.setRelClass(
|
||||
"isMergedIn");
|
||||
r.setRelClass("isMergedIn");
|
||||
tmp.add(r);
|
||||
return tmp.stream();
|
||||
})
|
||||
|
|
|
@ -46,8 +46,7 @@ public class SparkCreateSimRels {
|
|||
sc.textFile(inputPath + "/" + entity)
|
||||
.mapToPair(
|
||||
s -> {
|
||||
MapDocument d =
|
||||
MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
|
||||
MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
|
||||
return new Tuple2<>(d.getIdentifier(), d);
|
||||
});
|
||||
|
||||
|
@ -58,8 +57,7 @@ public class SparkCreateSimRels {
|
|||
// mapDocument, dedupConf);
|
||||
|
||||
// create relations by comparing only elements in the same group
|
||||
final JavaPairRDD<String, String> dedupRels =
|
||||
Deduper.computeRelations2(sc, blocks, dedupConf);
|
||||
final JavaPairRDD<String, String> dedupRels = Deduper.computeRelations2(sc, blocks, dedupConf);
|
||||
// final JavaPairRDD<String,String> dedupRels = Deduper.computeRelations(sc, blocks,
|
||||
// dedupConf);
|
||||
|
||||
|
@ -73,7 +71,8 @@ public class SparkCreateSimRels {
|
|||
return r;
|
||||
});
|
||||
|
||||
spark.createDataset(isSimilarToRDD.rdd(), Encoders.bean(Relation.class))
|
||||
spark
|
||||
.createDataset(isSimilarToRDD.rdd(), Encoders.bean(Relation.class))
|
||||
.write()
|
||||
.mode("overwrite")
|
||||
.save(DedupUtility.createSimRelPath(targetPath, entity));
|
||||
|
|
|
@ -41,7 +41,8 @@ public class SparkPropagateRelationsJob {
|
|||
final String targetRelPath = parser.get("targetRelPath");
|
||||
|
||||
final Dataset<Relation> merge =
|
||||
spark.read()
|
||||
spark
|
||||
.read()
|
||||
.load(mergeRelPath)
|
||||
.as(Encoders.bean(Relation.class))
|
||||
.where("relClass == 'merges'");
|
||||
|
@ -57,25 +58,20 @@ public class SparkPropagateRelationsJob {
|
|||
final Relation mergeRelation = r._2();
|
||||
final Relation relation = r._1();
|
||||
|
||||
if (mergeRelation != null)
|
||||
relation.setSource(mergeRelation.getSource());
|
||||
if (mergeRelation != null) relation.setSource(mergeRelation.getSource());
|
||||
return relation;
|
||||
},
|
||||
Encoders.bean(Relation.class));
|
||||
|
||||
final Dataset<Relation> secondJoin =
|
||||
firstJoin
|
||||
.joinWith(
|
||||
merge,
|
||||
merge.col("target").equalTo(firstJoin.col("target")),
|
||||
"left_outer")
|
||||
.joinWith(merge, merge.col("target").equalTo(firstJoin.col("target")), "left_outer")
|
||||
.map(
|
||||
(MapFunction<Tuple2<Relation, Relation>, Relation>)
|
||||
r -> {
|
||||
final Relation mergeRelation = r._2();
|
||||
final Relation relation = r._1();
|
||||
if (mergeRelation != null)
|
||||
relation.setTarget(mergeRelation.getSource());
|
||||
if (mergeRelation != null) relation.setTarget(mergeRelation.getSource());
|
||||
return relation;
|
||||
},
|
||||
Encoders.bean(Relation.class));
|
||||
|
|
|
@ -44,16 +44,13 @@ public class SparkUpdateEntityJob {
|
|||
final String entity = parser.get("entity");
|
||||
final String destination = parser.get("targetPath");
|
||||
|
||||
final Dataset<Relation> df =
|
||||
spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));
|
||||
final Dataset<Relation> df = spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));
|
||||
final JavaPairRDD<String, String> mergedIds =
|
||||
df.where("relClass == 'merges'")
|
||||
.select(df.col("target"))
|
||||
.distinct()
|
||||
.toJavaRDD()
|
||||
.mapToPair(
|
||||
(PairFunction<Row, String, String>)
|
||||
r -> new Tuple2<>(r.getString(0), "d"));
|
||||
.mapToPair((PairFunction<Row, String, String>) r -> new Tuple2<>(r.getString(0), "d"));
|
||||
final JavaRDD<String> sourceEntity = sc.textFile(entityPath);
|
||||
|
||||
final JavaRDD<String> dedupEntity = sc.textFile(dedupRecordPath);
|
||||
|
|
|
@ -49,9 +49,7 @@ public class GraphHiveImporterJob {
|
|||
conf.set("hive.metastore.uris", hiveMetastoreUris);
|
||||
|
||||
runWithSparkHiveSession(
|
||||
conf,
|
||||
isSparkSessionManaged,
|
||||
spark -> loadGraphAsHiveDB(spark, inputPath, hiveDbName));
|
||||
conf, isSparkSessionManaged, spark -> loadGraphAsHiveDB(spark, inputPath, hiveDbName));
|
||||
}
|
||||
|
||||
// protected for testing
|
||||
|
@ -64,7 +62,8 @@ public class GraphHiveImporterJob {
|
|||
// Read the input file and convert it into RDD of serializable object
|
||||
ModelSupport.oafTypes.forEach(
|
||||
(name, clazz) ->
|
||||
spark.createDataset(
|
||||
spark
|
||||
.createDataset(
|
||||
sc.textFile(inputPath + "/" + name)
|
||||
.map(s -> OBJECT_MAPPER.readValue(s, clazz))
|
||||
.rdd(),
|
||||
|
|
|
@ -46,7 +46,9 @@ public abstract class AbstractMdRecordToOafMapper {
|
|||
protected final Map<String, String> code2name;
|
||||
|
||||
protected static final Qualifier MAIN_TITLE_QUALIFIER =
|
||||
qualifier("main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title");
|
||||
qualifier(
|
||||
"main title", "main title",
|
||||
"dnet:dataCite_title", "dnet:dataCite_title");
|
||||
|
||||
protected AbstractMdRecordToOafMapper(final Map<String, String> code2name) {
|
||||
this.code2name = code2name;
|
||||
|
@ -67,8 +69,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
|||
final Document doc =
|
||||
DocumentHelper.parseText(
|
||||
xml.replaceAll(
|
||||
"http://datacite.org/schema/kernel-4",
|
||||
"http://datacite.org/schema/kernel-3"));
|
||||
"http://datacite.org/schema/kernel-4", "http://datacite.org/schema/kernel-3"));
|
||||
|
||||
final String type = doc.valueOf("//dr:CobjCategory/@type");
|
||||
final KeyValue collectedFrom =
|
||||
|
@ -212,12 +213,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
|||
r.setCollectedfrom(Arrays.asList(collectedFrom));
|
||||
r.setPid(
|
||||
prepareListStructProps(
|
||||
doc,
|
||||
"//oaf:identifier",
|
||||
"@identifierType",
|
||||
"dnet:pid_types",
|
||||
"dnet:pid_types",
|
||||
info));
|
||||
doc, "//oaf:identifier", "@identifierType", "dnet:pid_types", "dnet:pid_types", info));
|
||||
r.setDateofcollection(doc.valueOf("//dr:dateOfCollection"));
|
||||
r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation"));
|
||||
r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES
|
||||
|
@ -297,16 +293,14 @@ public abstract class AbstractMdRecordToOafMapper {
|
|||
|
||||
protected abstract Field<String> prepareSoftwareCodeRepositoryUrl(Document doc, DataInfo info);
|
||||
|
||||
protected abstract List<StructuredProperty> prepareSoftwareLicenses(
|
||||
Document doc, DataInfo info);
|
||||
protected abstract List<StructuredProperty> prepareSoftwareLicenses(Document doc, DataInfo info);
|
||||
|
||||
protected abstract List<Field<String>> prepareSoftwareDocumentationUrls(
|
||||
Document doc, DataInfo info);
|
||||
|
||||
protected abstract List<GeoLocation> prepareDatasetGeoLocations(Document doc, DataInfo info);
|
||||
|
||||
protected abstract Field<String> prepareDatasetMetadataVersionNumber(
|
||||
Document doc, DataInfo info);
|
||||
protected abstract Field<String> prepareDatasetMetadataVersionNumber(Document doc, DataInfo info);
|
||||
|
||||
protected abstract Field<String> prepareDatasetLastMetadataUpdate(Document doc, DataInfo info);
|
||||
|
||||
|
@ -368,9 +362,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
|||
final Node n = (Node) o;
|
||||
final String classId = n.valueOf(xpathClassId);
|
||||
final String className = code2name.get(classId);
|
||||
res.add(
|
||||
structuredProperty(
|
||||
n.getText(), classId, className, schemeId, schemeName, info));
|
||||
res.add(structuredProperty(n.getText(), classId, className, schemeId, schemeName, info));
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
@ -404,8 +396,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
|||
|
||||
protected OAIProvenance prepareOAIprovenance(final Document doc) {
|
||||
final Node n =
|
||||
doc.selectSingleNode(
|
||||
"//*[local-name()='provenance']/*[local-name()='originDescription']");
|
||||
doc.selectSingleNode("//*[local-name()='provenance']/*[local-name()='originDescription']");
|
||||
|
||||
if (n == null) {
|
||||
return null;
|
||||
|
@ -422,8 +413,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
|||
final String harvestDate = n.valueOf("@harvestDate");
|
||||
;
|
||||
|
||||
return oaiIProvenance(
|
||||
identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
|
||||
return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
|
||||
}
|
||||
|
||||
protected DataInfo prepareDataInfo(final Document doc) {
|
||||
|
@ -431,12 +421,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
|||
|
||||
if (n == null) {
|
||||
return dataInfo(
|
||||
false,
|
||||
null,
|
||||
false,
|
||||
false,
|
||||
MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS,
|
||||
"0.9");
|
||||
false, null, false, false, MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS, "0.9");
|
||||
}
|
||||
|
||||
final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
|
||||
|
@ -444,8 +429,7 @@ public abstract class AbstractMdRecordToOafMapper {
|
|||
final String paSchemeId = n.valueOf("./oaf:provenanceaction/@schemeid");
|
||||
final String paSchemeName = n.valueOf("./oaf:provenanceaction/@schemename");
|
||||
|
||||
final boolean deletedbyinference =
|
||||
Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference"));
|
||||
final boolean deletedbyinference = Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference"));
|
||||
final String inferenceprovenance = n.valueOf("./oaf:inferenceprovenance");
|
||||
final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred"));
|
||||
final String trust = n.valueOf("./oaf:trust");
|
||||
|
|
|
@ -60,7 +60,8 @@ public class DispatchEntitiesApplication {
|
|||
|
||||
log.info("Processing entities ({}) in file: {}", type, sourcePath);
|
||||
|
||||
spark.read()
|
||||
spark
|
||||
.read()
|
||||
.textFile(sourcePath)
|
||||
.filter((FilterFunction<String>) value -> isEntityType(value, type))
|
||||
.map(
|
||||
|
|
|
@ -92,7 +92,8 @@ public class GenerateEntitiesApplication {
|
|||
.flatMap(list -> list.iterator()));
|
||||
}
|
||||
|
||||
inputRdd.mapToPair(oaf -> new Tuple2<>(ModelSupport.idFn().apply(oaf), oaf))
|
||||
inputRdd
|
||||
.mapToPair(oaf -> new Tuple2<>(ModelSupport.idFn().apply(oaf), oaf))
|
||||
.reduceByKey((o1, o2) -> merge(o1, o2))
|
||||
.map(Tuple2::_2)
|
||||
.map(
|
||||
|
|
|
@ -81,9 +81,7 @@ public class MergeClaimsApplication {
|
|||
readFromPath(spark, rawPath, clazz)
|
||||
.map(
|
||||
(MapFunction<T, Tuple2<String, T>>)
|
||||
value ->
|
||||
new Tuple2<>(
|
||||
ModelSupport.idFn().apply(value), value),
|
||||
value -> new Tuple2<>(ModelSupport.idFn().apply(value), value),
|
||||
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
|
||||
|
||||
final JavaSparkContext jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
||||
|
@ -92,19 +90,15 @@ public class MergeClaimsApplication {
|
|||
.getValue()
|
||||
.map(
|
||||
(MapFunction<T, Tuple2<String, T>>)
|
||||
value ->
|
||||
new Tuple2<>(
|
||||
ModelSupport.idFn().apply(value), value),
|
||||
value -> new Tuple2<>(ModelSupport.idFn().apply(value), value),
|
||||
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
|
||||
|
||||
raw.joinWith(claim, raw.col("_1").equalTo(claim.col("_1")), "full_outer")
|
||||
.map(
|
||||
(MapFunction<Tuple2<Tuple2<String, T>, Tuple2<String, T>>, T>)
|
||||
value -> {
|
||||
Optional<Tuple2<String, T>> opRaw =
|
||||
Optional.ofNullable(value._1());
|
||||
Optional<Tuple2<String, T>> opClaim =
|
||||
Optional.ofNullable(value._2());
|
||||
Optional<Tuple2<String, T>> opRaw = Optional.ofNullable(value._1());
|
||||
Optional<Tuple2<String, T>> opClaim = Optional.ofNullable(value._2());
|
||||
|
||||
return opRaw.isPresent()
|
||||
? opRaw.get()._2()
|
||||
|
@ -123,14 +117,13 @@ public class MergeClaimsApplication {
|
|||
|
||||
private static <T extends Oaf> Dataset<T> readFromPath(
|
||||
SparkSession spark, String path, Class<T> clazz) {
|
||||
return spark.read()
|
||||
return spark
|
||||
.read()
|
||||
.textFile(path)
|
||||
.map(
|
||||
(MapFunction<String, T>) value -> OBJECT_MAPPER.readValue(value, clazz),
|
||||
Encoders.bean(clazz))
|
||||
.filter(
|
||||
(FilterFunction<T>)
|
||||
value -> Objects.nonNull(ModelSupport.idFn().apply(value)));
|
||||
.filter((FilterFunction<T>) value -> Objects.nonNull(ModelSupport.idFn().apply(value)));
|
||||
}
|
||||
|
||||
private static void removeOutputDir(SparkSession spark, String path) {
|
||||
|
|
|
@ -90,8 +90,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
|||
smdbe.execute("queryOrganizations.sql", smdbe::processOrganization);
|
||||
|
||||
log.info("Processing relations ds <-> orgs ...");
|
||||
smdbe.execute(
|
||||
"queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);
|
||||
smdbe.execute("queryDatasourceOrganization.sql", smdbe::processDatasourceOrganization);
|
||||
|
||||
log.info("Processing projects <-> orgs ...");
|
||||
smdbe.execute("queryProjectOrganization.sql", smdbe::processProjectOrganization);
|
||||
|
@ -117,8 +116,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
|||
public void execute(final String sqlFile, final Function<ResultSet, List<Oaf>> producer)
|
||||
throws Exception {
|
||||
final String sql =
|
||||
IOUtils.toString(
|
||||
getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile));
|
||||
IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/sql/" + sqlFile));
|
||||
|
||||
final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(oaf -> emitOaf(oaf));
|
||||
|
||||
|
@ -145,8 +143,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
|||
ds.setExtraInfo(new ArrayList<>()); // Values not present in the DB
|
||||
ds.setOaiprovenance(null); // Values not present in the DB
|
||||
ds.setDatasourcetype(prepareQualifierSplitting(rs.getString("datasourcetype")));
|
||||
ds.setOpenairecompatibility(
|
||||
prepareQualifierSplitting(rs.getString("openairecompatibility")));
|
||||
ds.setOpenairecompatibility(prepareQualifierSplitting(rs.getString("openairecompatibility")));
|
||||
ds.setOfficialname(field(rs.getString("officialname"), info));
|
||||
ds.setEnglishname(field(rs.getString("englishname"), info));
|
||||
ds.setWebsiteurl(field(rs.getString("websiteurl"), info));
|
||||
|
@ -180,10 +177,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
|||
ds.setCertificates(field(rs.getString("certificates"), info));
|
||||
ds.setPolicies(new ArrayList<>()); // The sql query returns an empty array
|
||||
ds.setJournal(
|
||||
prepareJournal(
|
||||
rs.getString("officialname"),
|
||||
rs.getString("journal"),
|
||||
info)); // Journal
|
||||
prepareJournal(rs.getString("officialname"), rs.getString("journal"), info)); // Journal
|
||||
ds.setDataInfo(info);
|
||||
ds.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||
|
||||
|
@ -277,13 +271,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
|||
o.setEcnonprofit(field(Boolean.toString(rs.getBoolean("ecnonprofit")), info));
|
||||
o.setEcresearchorganization(
|
||||
field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info));
|
||||
o.setEchighereducation(
|
||||
field(Boolean.toString(rs.getBoolean("echighereducation")), info));
|
||||
o.setEchighereducation(field(Boolean.toString(rs.getBoolean("echighereducation")), info));
|
||||
o.setEcinternationalorganizationeurinterests(
|
||||
field(
|
||||
Boolean.toString(
|
||||
rs.getBoolean("ecinternationalorganizationeurinterests")),
|
||||
info));
|
||||
field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info));
|
||||
o.setEcinternationalorganization(
|
||||
field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info));
|
||||
o.setEcenterprise(field(Boolean.toString(rs.getBoolean("ecenterprise")), info));
|
||||
|
@ -380,10 +370,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
|||
false,
|
||||
false,
|
||||
qualifier(
|
||||
"user:claim",
|
||||
"user:claim",
|
||||
"dnet:provenanceActions",
|
||||
"dnet:provenanceActions"),
|
||||
"user:claim", "user:claim", "dnet:provenanceActions", "dnet:provenanceActions"),
|
||||
"0.9");
|
||||
|
||||
final List<KeyValue> collectedFrom =
|
||||
|
@ -416,11 +403,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
|||
return Arrays.asList(r);
|
||||
} else {
|
||||
final String sourceId =
|
||||
createOpenaireId(
|
||||
rs.getString("source_type"), rs.getString("source_id"), false);
|
||||
createOpenaireId(rs.getString("source_type"), rs.getString("source_id"), false);
|
||||
final String targetId =
|
||||
createOpenaireId(
|
||||
rs.getString("target_type"), rs.getString("target_id"), false);
|
||||
createOpenaireId(rs.getString("target_type"), rs.getString("target_id"), false);
|
||||
|
||||
final Relation r1 = new Relation();
|
||||
final Relation r2 = new Relation();
|
||||
|
@ -496,9 +481,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
|||
|
||||
private List<Field<String>> prepareListFields(final Array array, final DataInfo info) {
|
||||
try {
|
||||
return array != null
|
||||
? listFields(info, (String[]) array.getArray())
|
||||
: new ArrayList<>();
|
||||
return array != null ? listFields(info, (String[]) array.getArray()) : new ArrayList<>();
|
||||
} catch (final SQLException e) {
|
||||
throw new RuntimeException("Invalid SQL array", e);
|
||||
}
|
||||
|
@ -544,9 +527,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
|
|||
final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2] : null;
|
||||
;
|
||||
if (issn != null || eissn != null || lissn != null) {
|
||||
return journal(
|
||||
name, issn, eissn, eissn, null, null, null, null, null, null, null,
|
||||
info);
|
||||
return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -42,8 +42,7 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
|
|||
}
|
||||
|
||||
public MigrateMongoMdstoresApplication(
|
||||
final String hdfsPath, final String mongoBaseUrl, final String mongoDb)
|
||||
throws Exception {
|
||||
final String hdfsPath, final String mongoBaseUrl, final String mongoDb) throws Exception {
|
||||
super(hdfsPath);
|
||||
this.mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb);
|
||||
}
|
||||
|
@ -54,12 +53,7 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
|
|||
log.info("Found " + colls.size() + " mdstores");
|
||||
|
||||
for (final Entry<String, String> entry : colls.entrySet()) {
|
||||
log.info(
|
||||
"Processing mdstore "
|
||||
+ entry.getKey()
|
||||
+ " (collection: "
|
||||
+ entry.getValue()
|
||||
+ ")");
|
||||
log.info("Processing mdstore " + entry.getKey() + " (collection: " + entry.getValue() + ")");
|
||||
final String currentColl = entry.getValue();
|
||||
|
||||
for (final String xml : mdstoreClient.listRecords(currentColl)) {
|
||||
|
|
|
@ -100,11 +100,7 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
|
||||
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
|
||||
instance.setAccessright(
|
||||
prepareQualifier(
|
||||
doc,
|
||||
"//oaf:accessrights",
|
||||
"dnet:access_modes",
|
||||
"dnet:access_modes"));
|
||||
prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes"));
|
||||
instance.setLicense(field(doc.valueOf("//oaf:license"), info));
|
||||
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
|
||||
instance.setProcessingchargeamount(
|
||||
|
@ -123,16 +119,14 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected List<StructuredProperty> prepareRelevantDates(
|
||||
final Document doc, final DataInfo info) {
|
||||
protected List<StructuredProperty> prepareRelevantDates(final Document doc, final DataInfo info) {
|
||||
return new ArrayList<>(); // NOT PRESENT IN OAF
|
||||
}
|
||||
|
||||
// SOFTWARES
|
||||
|
||||
@Override
|
||||
protected Qualifier prepareSoftwareProgrammingLanguage(
|
||||
final Document doc, final DataInfo info) {
|
||||
protected Qualifier prepareSoftwareProgrammingLanguage(final Document doc, final DataInfo info) {
|
||||
return null; // NOT PRESENT IN OAF
|
||||
}
|
||||
|
||||
|
@ -156,8 +150,7 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
|
||||
// DATASETS
|
||||
@Override
|
||||
protected List<GeoLocation> prepareDatasetGeoLocations(
|
||||
final Document doc, final DataInfo info) {
|
||||
protected List<GeoLocation> prepareDatasetGeoLocations(final Document doc, final DataInfo info) {
|
||||
return new ArrayList<>(); // NOT PRESENT IN OAF
|
||||
}
|
||||
|
||||
|
|
|
@ -58,10 +58,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
structuredProperty(
|
||||
((Node) o).getText(),
|
||||
prepareQualifier(
|
||||
(Node) o,
|
||||
"./@nameIdentifierScheme",
|
||||
"dnet:pid_types",
|
||||
"dnet:pid_types"),
|
||||
(Node) o, "./@nameIdentifierScheme", "dnet:pid_types", "dnet:pid_types"),
|
||||
info));
|
||||
}
|
||||
return res;
|
||||
|
@ -78,21 +75,16 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
instance.setUrl(new ArrayList<>());
|
||||
instance.setInstancetype(
|
||||
prepareQualifier(
|
||||
doc,
|
||||
"//dr:CobjCategory",
|
||||
"dnet:publication_resource",
|
||||
"dnet:publication_resource"));
|
||||
doc, "//dr:CobjCategory", "dnet:publication_resource", "dnet:publication_resource"));
|
||||
instance.setCollectedfrom(collectedfrom);
|
||||
instance.setHostedby(hostedby);
|
||||
instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
|
||||
instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
|
||||
instance.setAccessright(
|
||||
prepareQualifier(
|
||||
doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes"));
|
||||
prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes"));
|
||||
instance.setLicense(field(doc.valueOf("//oaf:license"), info));
|
||||
instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
|
||||
instance.setProcessingchargeamount(
|
||||
field(doc.valueOf("//oaf:processingchargeamount"), info));
|
||||
instance.setProcessingchargeamount(field(doc.valueOf("//oaf:processingchargeamount"), info));
|
||||
instance.setProcessingchargecurrency(
|
||||
field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));
|
||||
|
||||
|
@ -119,8 +111,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected List<StructuredProperty> prepareRelevantDates(
|
||||
final Document doc, final DataInfo info) {
|
||||
protected List<StructuredProperty> prepareRelevantDates(final Document doc, final DataInfo info) {
|
||||
final List<StructuredProperty> res = new ArrayList<>();
|
||||
for (final Object o : doc.selectNodes("//datacite:date")) {
|
||||
final String dateType = ((Node) o).valueOf("@dateType");
|
||||
|
@ -202,13 +193,9 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected Qualifier prepareSoftwareProgrammingLanguage(
|
||||
final Document doc, final DataInfo info) {
|
||||
protected Qualifier prepareSoftwareProgrammingLanguage(final Document doc, final DataInfo info) {
|
||||
return prepareQualifier(
|
||||
doc,
|
||||
"//datacite:format",
|
||||
"dnet:programming_languages",
|
||||
"dnet:programming_languages");
|
||||
doc, "//datacite:format", "dnet:programming_languages", "dnet:programming_languages");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -235,8 +222,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
// DATASETS
|
||||
|
||||
@Override
|
||||
protected List<GeoLocation> prepareDatasetGeoLocations(
|
||||
final Document doc, final DataInfo info) {
|
||||
protected List<GeoLocation> prepareDatasetGeoLocations(final Document doc, final DataInfo info) {
|
||||
final List<GeoLocation> res = new ArrayList<>();
|
||||
|
||||
for (final Object o : doc.selectNodes("//datacite:geoLocation")) {
|
||||
|
@ -293,8 +279,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
final List<Oaf> res = new ArrayList<>();
|
||||
|
||||
for (final Object o :
|
||||
doc.selectNodes(
|
||||
"//datacite:relatedIdentifier[@relatedIdentifierType='OPENAIRE']")) {
|
||||
doc.selectNodes("//datacite:relatedIdentifier[@relatedIdentifierType='OPENAIRE']")) {
|
||||
final String otherId = createOpenaireId(50, ((Node) o).getText(), false);
|
||||
final String type = ((Node) o).valueOf("@relationType");
|
||||
|
||||
|
@ -320,22 +305,10 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
|||
} else if (type.equals("IsPartOf")) {
|
||||
res.add(
|
||||
prepareOtherResultRel(
|
||||
collectedFrom,
|
||||
info,
|
||||
lastUpdateTimestamp,
|
||||
docId,
|
||||
otherId,
|
||||
"part",
|
||||
"IsPartOf"));
|
||||
collectedFrom, info, lastUpdateTimestamp, docId, otherId, "part", "IsPartOf"));
|
||||
res.add(
|
||||
prepareOtherResultRel(
|
||||
collectedFrom,
|
||||
info,
|
||||
lastUpdateTimestamp,
|
||||
otherId,
|
||||
docId,
|
||||
"part",
|
||||
"HasParts"));
|
||||
collectedFrom, info, lastUpdateTimestamp, otherId, docId, "part", "HasParts"));
|
||||
} else {
|
||||
}
|
||||
}
|
||||
|
|
|
@ -45,8 +45,9 @@ public class AbstractMigrationApplication implements Closeable {
|
|||
private Configuration getConf() throws IOException {
|
||||
final Configuration conf = new Configuration();
|
||||
/*
|
||||
* conf.set("fs.defaultFS", hdfsNameNode); conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
|
||||
* conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName()); System.setProperty("HADOOP_USER_NAME", hdfsUser);
|
||||
* conf.set("fs.defaultFS", hdfsNameNode); conf.set("fs.hdfs.impl",
|
||||
* org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()); conf.set("fs.file.impl",
|
||||
* org.apache.hadoop.fs.LocalFileSystem.class.getName()); System.setProperty("HADOOP_USER_NAME", hdfsUser);
|
||||
* System.setProperty("hadoop.home.dir", "/"); FileSystem.get(URI.create(hdfsNameNode), conf);
|
||||
*/
|
||||
return conf;
|
||||
|
@ -64,9 +65,7 @@ public class AbstractMigrationApplication implements Closeable {
|
|||
|
||||
protected void emitOaf(final Oaf oaf) {
|
||||
try {
|
||||
emit(
|
||||
objectMapper.writeValueAsString(oaf),
|
||||
oaf.getClass().getSimpleName().toLowerCase());
|
||||
emit(objectMapper.writeValueAsString(oaf), oaf.getClass().getSimpleName().toLowerCase());
|
||||
} catch (final Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
|
|
@ -71,9 +71,7 @@ public class MdstoreClient implements Closeable {
|
|||
if (!Iterables.contains(db.listCollectionNames(), collName)) {
|
||||
final String err =
|
||||
String.format(
|
||||
String.format(
|
||||
"Missing collection '%s' in database '%s'",
|
||||
collName, db.getName()));
|
||||
String.format("Missing collection '%s' in database '%s'", collName, db.getName()));
|
||||
log.warn(err);
|
||||
if (abortIfMissing) {
|
||||
throw new RuntimeException(err);
|
||||
|
|
|
@ -7,27 +7,25 @@ import eu.dnetlib.dhp.schema.oaf.Qualifier;
|
|||
public class MigrationConstants {
|
||||
|
||||
public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER =
|
||||
qualifier(
|
||||
"publication",
|
||||
"publication",
|
||||
"dnet:result_typologies",
|
||||
"dnet:result_typologies");
|
||||
qualifier("publication", "publication", "dnet:result_typologies", "dnet:result_typologies");
|
||||
public static final Qualifier DATASET_RESULTTYPE_QUALIFIER =
|
||||
qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies");
|
||||
qualifier(
|
||||
"dataset", "dataset",
|
||||
"dnet:result_typologies", "dnet:result_typologies");
|
||||
public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER =
|
||||
qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies");
|
||||
qualifier(
|
||||
"software", "software",
|
||||
"dnet:result_typologies", "dnet:result_typologies");
|
||||
public static final Qualifier OTHER_RESULTTYPE_QUALIFIER =
|
||||
qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies");
|
||||
qualifier(
|
||||
"other", "other",
|
||||
"dnet:result_typologies", "dnet:result_typologies");
|
||||
public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS =
|
||||
qualifier(
|
||||
"sysimport:crosswalk:repository",
|
||||
"sysimport:crosswalk:repository",
|
||||
"dnet:provenanceActions",
|
||||
"dnet:provenanceActions");
|
||||
"sysimport:crosswalk:repository", "sysimport:crosswalk:repository",
|
||||
"dnet:provenanceActions", "dnet:provenanceActions");
|
||||
public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION =
|
||||
qualifier(
|
||||
"sysimport:crosswalk:entityregistry",
|
||||
"sysimport:crosswalk:entityregistry",
|
||||
"dnet:provenanceActions",
|
||||
"dnet:provenanceActions");
|
||||
"sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry",
|
||||
"dnet:provenanceActions", "dnet:provenanceActions");
|
||||
}
|
||||
|
|
|
@ -76,8 +76,7 @@ public class OafMapperUtils {
|
|||
final String schemename,
|
||||
final DataInfo dataInfo) {
|
||||
|
||||
return structuredProperty(
|
||||
value, qualifier(classid, classname, schemeid, schemename), dataInfo);
|
||||
return structuredProperty(value, qualifier(classid, classname, schemeid, schemename), dataInfo);
|
||||
}
|
||||
|
||||
public static StructuredProperty structuredProperty(
|
||||
|
|
|
@ -34,8 +34,7 @@ public class PacePerson {
|
|||
public static Set<String> loadFromClasspath(final String classpath) {
|
||||
final Set<String> h = new HashSet<>();
|
||||
try {
|
||||
for (final String s :
|
||||
IOUtils.readLines(PacePerson.class.getResourceAsStream(classpath))) {
|
||||
for (final String s : IOUtils.readLines(PacePerson.class.getResourceAsStream(classpath))) {
|
||||
h.add(s);
|
||||
}
|
||||
} catch (final Throwable e) {
|
||||
|
|
|
@ -79,7 +79,8 @@ public class ImportDataFromMongo {
|
|||
.is(interpretation)
|
||||
.get();
|
||||
final List<String> ids = new ArrayList<>();
|
||||
metadata.find((Bson) query)
|
||||
metadata
|
||||
.find((Bson) query)
|
||||
.forEach((Consumer<Document>) document -> ids.add(document.getString("mdId")));
|
||||
List<String> databaseId =
|
||||
ids.stream()
|
||||
|
@ -121,8 +122,7 @@ public class ImportDataFromMongo {
|
|||
value.set(document.getString("body"));
|
||||
|
||||
if (counter.get() % 10000 == 0) {
|
||||
System.out.println(
|
||||
"Added " + counter.get());
|
||||
System.out.println("Added " + counter.get());
|
||||
}
|
||||
try {
|
||||
writer.append(key, value);
|
||||
|
@ -138,8 +138,7 @@ public class ImportDataFromMongo {
|
|||
* Return the name of mongo collection giving an MdStore ID
|
||||
*
|
||||
* @param mdId The id of the MDStore
|
||||
* @param metadataManager The collection metadataManager on mongo which contains this
|
||||
* information
|
||||
* @param metadataManager The collection metadataManager on mongo which contains this information
|
||||
* @return
|
||||
*/
|
||||
private static String getCurrentId(
|
||||
|
|
|
@ -56,23 +56,27 @@ public class SparkExtractEntitiesJob {
|
|||
.collect(Collectors.toList());
|
||||
if (entities.stream().anyMatch("dataset"::equalsIgnoreCase)) {
|
||||
// Extract Dataset
|
||||
inputRDD.filter(SparkExtractEntitiesJob::isDataset)
|
||||
inputRDD
|
||||
.filter(SparkExtractEntitiesJob::isDataset)
|
||||
.saveAsTextFile(targetPath + "/dataset/" + tdir, GzipCodec.class);
|
||||
}
|
||||
if (entities.stream().anyMatch("unknown"::equalsIgnoreCase)) {
|
||||
// Extract Unknown
|
||||
inputRDD.filter(SparkExtractEntitiesJob::isUnknown)
|
||||
inputRDD
|
||||
.filter(SparkExtractEntitiesJob::isUnknown)
|
||||
.saveAsTextFile(targetPath + "/unknown/" + tdir, GzipCodec.class);
|
||||
}
|
||||
|
||||
if (entities.stream().anyMatch("relation"::equalsIgnoreCase)) {
|
||||
// Extract Relation
|
||||
inputRDD.filter(SparkExtractEntitiesJob::isRelation)
|
||||
inputRDD
|
||||
.filter(SparkExtractEntitiesJob::isRelation)
|
||||
.saveAsTextFile(targetPath + "/relation/" + tdir, GzipCodec.class);
|
||||
}
|
||||
if (entities.stream().anyMatch("publication"::equalsIgnoreCase)) {
|
||||
// Extract Relation
|
||||
inputRDD.filter(SparkExtractEntitiesJob::isPublication)
|
||||
inputRDD
|
||||
.filter(SparkExtractEntitiesJob::isPublication)
|
||||
.saveAsTextFile(targetPath + "/publication/" + tdir, GzipCodec.class);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -41,8 +41,7 @@ public class SparkSXGeneratePidSimlarity {
|
|||
.filter(
|
||||
t ->
|
||||
!StringUtils.substringAfter(t._1(), "|")
|
||||
.equalsIgnoreCase(
|
||||
StringUtils.substringAfter(t._2(), "::")))
|
||||
.equalsIgnoreCase(StringUtils.substringAfter(t._2(), "::")))
|
||||
.distinct();
|
||||
|
||||
final JavaPairRDD<String, String> publicationSimRel =
|
||||
|
@ -56,8 +55,7 @@ public class SparkSXGeneratePidSimlarity {
|
|||
.filter(
|
||||
t ->
|
||||
!StringUtils.substringAfter(t._1(), "|")
|
||||
.equalsIgnoreCase(
|
||||
StringUtils.substringAfter(t._2(), "::")))
|
||||
.equalsIgnoreCase(StringUtils.substringAfter(t._2(), "::")))
|
||||
.distinct();
|
||||
|
||||
JavaRDD<DLIRelation> simRel =
|
||||
|
@ -71,7 +69,8 @@ public class SparkSXGeneratePidSimlarity {
|
|||
r.setRelType("similar");
|
||||
return r;
|
||||
});
|
||||
spark.createDataset(simRel.rdd(), Encoders.bean(DLIRelation.class))
|
||||
spark
|
||||
.createDataset(simRel.rdd(), Encoders.bean(DLIRelation.class))
|
||||
.distinct()
|
||||
.write()
|
||||
.mode(SaveMode.Overwrite)
|
||||
|
|
|
@ -65,9 +65,7 @@ public class SparkScholexplorerCreateRawGraphJob {
|
|||
SparkSession.builder()
|
||||
.config(
|
||||
new SparkConf()
|
||||
.set(
|
||||
"spark.serializer",
|
||||
"org.apache.spark.serializer.KryoSerializer"))
|
||||
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer"))
|
||||
.appName(SparkScholexplorerCreateRawGraphJob.class.getSimpleName())
|
||||
.master(parser.get("master"))
|
||||
.getOrCreate();
|
||||
|
@ -89,17 +87,14 @@ public class SparkScholexplorerCreateRawGraphJob {
|
|||
}
|
||||
switch (entity) {
|
||||
case "dataset":
|
||||
union.mapToPair(
|
||||
union
|
||||
.mapToPair(
|
||||
(PairFunction<String, String, DLIDataset>)
|
||||
f -> {
|
||||
final String id = getJPathString(IDJSONPATH, f);
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
mapper.configure(
|
||||
DeserializationFeature
|
||||
.FAIL_ON_UNKNOWN_PROPERTIES,
|
||||
false);
|
||||
return new Tuple2<>(
|
||||
id, mapper.readValue(f, DLIDataset.class));
|
||||
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||
return new Tuple2<>(id, mapper.readValue(f, DLIDataset.class));
|
||||
})
|
||||
.reduceByKey(
|
||||
(a, b) -> {
|
||||
|
@ -114,17 +109,14 @@ public class SparkScholexplorerCreateRawGraphJob {
|
|||
.saveAsTextFile(targetPath, GzipCodec.class);
|
||||
break;
|
||||
case "publication":
|
||||
union.mapToPair(
|
||||
union
|
||||
.mapToPair(
|
||||
(PairFunction<String, String, DLIPublication>)
|
||||
f -> {
|
||||
final String id = getJPathString(IDJSONPATH, f);
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
mapper.configure(
|
||||
DeserializationFeature
|
||||
.FAIL_ON_UNKNOWN_PROPERTIES,
|
||||
false);
|
||||
return new Tuple2<>(
|
||||
id, mapper.readValue(f, DLIPublication.class));
|
||||
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||
return new Tuple2<>(id, mapper.readValue(f, DLIPublication.class));
|
||||
})
|
||||
.reduceByKey(
|
||||
(a, b) -> {
|
||||
|
@ -139,17 +131,14 @@ public class SparkScholexplorerCreateRawGraphJob {
|
|||
.saveAsTextFile(targetPath, GzipCodec.class);
|
||||
break;
|
||||
case "unknown":
|
||||
union.mapToPair(
|
||||
union
|
||||
.mapToPair(
|
||||
(PairFunction<String, String, DLIUnknown>)
|
||||
f -> {
|
||||
final String id = getJPathString(IDJSONPATH, f);
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
mapper.configure(
|
||||
DeserializationFeature
|
||||
.FAIL_ON_UNKNOWN_PROPERTIES,
|
||||
false);
|
||||
return new Tuple2<>(
|
||||
id, mapper.readValue(f, DLIUnknown.class));
|
||||
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||
return new Tuple2<>(id, mapper.readValue(f, DLIUnknown.class));
|
||||
})
|
||||
.reduceByKey(
|
||||
(a, b) -> {
|
||||
|
@ -165,25 +154,18 @@ public class SparkScholexplorerCreateRawGraphJob {
|
|||
break;
|
||||
case "relation":
|
||||
SparkSXGeneratePidSimlarity.generateDataFrame(
|
||||
spark,
|
||||
sc,
|
||||
inputPath.replace("/relation", ""),
|
||||
targetPath.replace("/relation", ""));
|
||||
spark, sc, inputPath.replace("/relation", ""), targetPath.replace("/relation", ""));
|
||||
RDD<DLIRelation> rdd =
|
||||
union.mapToPair(
|
||||
union
|
||||
.mapToPair(
|
||||
(PairFunction<String, String, DLIRelation>)
|
||||
f -> {
|
||||
final String source =
|
||||
getJPathString(SOURCEJSONPATH, f);
|
||||
final String target =
|
||||
getJPathString(TARGETJSONPATH, f);
|
||||
final String reltype =
|
||||
getJPathString(RELJSONPATH, f);
|
||||
final String source = getJPathString(SOURCEJSONPATH, f);
|
||||
final String target = getJPathString(TARGETJSONPATH, f);
|
||||
final String reltype = getJPathString(RELJSONPATH, f);
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
mapper.configure(
|
||||
DeserializationFeature
|
||||
.FAIL_ON_UNKNOWN_PROPERTIES,
|
||||
false);
|
||||
DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||
return new Tuple2<>(
|
||||
DHPUtils.md5(
|
||||
String.format(
|
||||
|
@ -201,17 +183,17 @@ public class SparkScholexplorerCreateRawGraphJob {
|
|||
.map(Tuple2::_2)
|
||||
.rdd();
|
||||
|
||||
spark.createDataset(rdd, Encoders.bean(DLIRelation.class))
|
||||
spark
|
||||
.createDataset(rdd, Encoders.bean(DLIRelation.class))
|
||||
.write()
|
||||
.mode(SaveMode.Overwrite)
|
||||
.save(targetPath);
|
||||
Dataset<Relation> rel_ds =
|
||||
spark.read().load(targetPath).as(Encoders.bean(Relation.class));
|
||||
Dataset<Relation> rel_ds = spark.read().load(targetPath).as(Encoders.bean(Relation.class));
|
||||
|
||||
System.out.println(
|
||||
"LOADING PATH :" + targetPath.replace("/relation", "") + "/pid_simRel");
|
||||
System.out.println("LOADING PATH :" + targetPath.replace("/relation", "") + "/pid_simRel");
|
||||
Dataset<Relation> sim_ds =
|
||||
spark.read()
|
||||
spark
|
||||
.read()
|
||||
.load(targetPath.replace("/relation", "") + "/pid_simRel")
|
||||
.as(Encoders.bean(Relation.class));
|
||||
|
||||
|
@ -219,24 +201,18 @@ public class SparkScholexplorerCreateRawGraphJob {
|
|||
sim_ds.map(
|
||||
(MapFunction<Relation, Relation>)
|
||||
relation -> {
|
||||
final String type =
|
||||
StringUtils.substringBefore(
|
||||
relation.getSource(), "|");
|
||||
final String type = StringUtils.substringBefore(relation.getSource(), "|");
|
||||
relation.setTarget(
|
||||
String.format(
|
||||
"%s|%s",
|
||||
type,
|
||||
StringUtils.substringAfter(
|
||||
relation.getTarget(), "::")));
|
||||
type, StringUtils.substringAfter(relation.getTarget(), "::")));
|
||||
return relation;
|
||||
},
|
||||
Encoders.bean(Relation.class));
|
||||
|
||||
final Dataset<Relation> firstJoin =
|
||||
rel_ds.joinWith(
|
||||
ids,
|
||||
ids.col("target").equalTo(rel_ds.col("source")),
|
||||
"left_outer")
|
||||
rel_ds
|
||||
.joinWith(ids, ids.col("target").equalTo(rel_ds.col("source")), "left_outer")
|
||||
.map(
|
||||
(MapFunction<Tuple2<Relation, Relation>, Relation>)
|
||||
s -> {
|
||||
|
@ -249,10 +225,7 @@ public class SparkScholexplorerCreateRawGraphJob {
|
|||
|
||||
Dataset<Relation> secondJoin =
|
||||
firstJoin
|
||||
.joinWith(
|
||||
ids,
|
||||
ids.col("target").equalTo(firstJoin.col("target")),
|
||||
"left_outer")
|
||||
.joinWith(ids, ids.col("target").equalTo(firstJoin.col("target")), "left_outer")
|
||||
.map(
|
||||
(MapFunction<Tuple2<Relation, Relation>, Relation>)
|
||||
s -> {
|
||||
|
|
|
@ -49,16 +49,13 @@ public class SparkScholexplorerGraphImporter {
|
|||
record -> {
|
||||
switch (parser.get("entity")) {
|
||||
case "dataset":
|
||||
final DatasetScholexplorerParser d =
|
||||
new DatasetScholexplorerParser();
|
||||
final DatasetScholexplorerParser d = new DatasetScholexplorerParser();
|
||||
return d.parseObject(record, relationMapper).iterator();
|
||||
case "publication":
|
||||
final PublicationScholexplorerParser p =
|
||||
new PublicationScholexplorerParser();
|
||||
final PublicationScholexplorerParser p = new PublicationScholexplorerParser();
|
||||
return p.parseObject(record, relationMapper).iterator();
|
||||
default:
|
||||
throw new IllegalArgumentException(
|
||||
"wrong values of entities");
|
||||
throw new IllegalArgumentException("wrong values of entities");
|
||||
}
|
||||
})
|
||||
.map(
|
||||
|
|
|

@@ -114,8 +114,7 @@ public abstract class AbstractScholexplorerParser {
return type
+ DHPUtils.md5(
String.format(
"%s::%s", pid.toLowerCase().trim(), pidType.toLowerCase().trim()));
String.format("%s::%s", pid.toLowerCase().trim(), pidType.toLowerCase().trim()));
}
protected DLIUnknown createUnknownObject(

@@ -161,22 +160,15 @@ public abstract class AbstractScholexplorerParser {
DLIRelation r = new DLIRelation();
r.setSource(parsedObject.getId());
final String relatedPid = n.getTextValue();
final String relatedPidType =
n.getAttributes().get("relatedIdentifierType");
final String relatedPidType = n.getAttributes().get("relatedIdentifierType");
final String relatedType =
n.getAttributes()
.getOrDefault("entityType", "unknown");
String relationSemantic =
n.getAttributes().get("relationType");
n.getAttributes().getOrDefault("entityType", "unknown");
String relationSemantic = n.getAttributes().get("relationType");
String inverseRelation;
final String targetId =
generateId(relatedPid, relatedPidType, relatedType);
final String targetId = generateId(relatedPid, relatedPidType, relatedType);
r.setDateOfCollection(dateOfCollection);
if (relationMapper.containsKey(
relationSemantic.toLowerCase())) {
RelInfo relInfo =
relationMapper.get(
relationSemantic.toLowerCase());
if (relationMapper.containsKey(relationSemantic.toLowerCase())) {
RelInfo relInfo = relationMapper.get(relationSemantic.toLowerCase());
relationSemantic = relInfo.getOriginal();
inverseRelation = relInfo.getInverse();
} else {

@@ -37,8 +37,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
parsedObject.setOriginalId(
Collections.singletonList(
VtdUtilityParser.getSingleValue(
ap, vn, "//*[local-name()='recordIdentifier']")));
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='recordIdentifier']")));
parsedObject.setOriginalObjIdentifier(
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='objIdentifier']"));

@@ -96,8 +95,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
provenance.setId(it.getAttributes().get("id"));
provenance.setName(it.getAttributes().get("name"));
provenance.setCollectionMode(provisionMode);
provenance.setCompletionStatus(
it.getAttributes().get("completionStatus"));
provenance.setCompletionStatus(it.getAttributes().get("completionStatus"));
provenances.add(provenance);
});
}

@@ -109,8 +107,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
provenance.setId(it.getAttributes().get("id"));
provenance.setName(it.getAttributes().get("name"));
provenance.setCollectionMode("resolved");
provenance.setCompletionStatus(
it.getAttributes().get("completionStatus"));
provenance.setCompletionStatus(it.getAttributes().get("completionStatus"));
provenances.add(provenance);
});
}

@@ -127,8 +124,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
})
.collect(Collectors.toList()));
parsedObject.setCompletionStatus(
VtdUtilityParser.getSingleValue(
ap, vn, "//*[local-name()='completionStatus']"));
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='completionStatus']"));
final List<Node> identifierType =
VtdUtilityParser.getTextValuesWithAttributes(

@@ -143,14 +139,10 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
parsedObject.setPid(Collections.singletonList(currentPid));
final String sourceId =
generateId(
currentPid.getValue(),
currentPid.getQualifier().getClassid(),
"dataset");
generateId(currentPid.getValue(), currentPid.getQualifier().getClassid(), "dataset");
parsedObject.setId(sourceId);
List<String> descs =
VtdUtilityParser.getTextValue(ap, vn, "//*[local-name()='description']");
List<String> descs = VtdUtilityParser.getTextValue(ap, vn, "//*[local-name()='description']");
if (descs != null && descs.size() > 0)
parsedObject.setDescription(
descs.stream()

@@ -169,10 +161,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
vn,
"//*[local-name()='relatedIdentifier']",
Arrays.asList(
"relatedIdentifierType",
"relationType",
"entityType",
"inverseRelationType"));
"relatedIdentifierType", "relationType", "entityType", "inverseRelationType"));
generateRelations(
relationMapper, parsedObject, result, di, dateOfCollection, relatedIdentifiers);

@@ -187,9 +176,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
.map(
it -> {
final Instance i = new Instance();
i.setUrl(
Collections.singletonList(
currentPid.getValue()));
i.setUrl(Collections.singletonList(currentPid.getValue()));
KeyValue h = new KeyValue();
i.setHostedby(h);
h.setKey(it.getAttributes().get("id"));

@@ -43,8 +43,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='resolvedDate']");
parsedObject.setOriginalId(
Collections.singletonList(
VtdUtilityParser.getSingleValue(
ap, vn, "//*[local-name()='recordIdentifier']")));
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='recordIdentifier']")));
if (StringUtils.isNotBlank(resolvedDate)) {
StructuredProperty currentDate = new StructuredProperty();

@@ -67,10 +66,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
inferPid(currentPid);
parsedObject.setPid(Collections.singletonList(currentPid));
final String sourceId =
generateId(
currentPid.getValue(),
currentPid.getQualifier().getClassid(),
"publication");
generateId(currentPid.getValue(), currentPid.getQualifier().getClassid(), "publication");
parsedObject.setId(sourceId);
parsedObject.setOriginalObjIdentifier(

@@ -107,8 +103,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
provenance.setId(it.getAttributes().get("id"));
provenance.setName(it.getAttributes().get("name"));
provenance.setCollectionMode(provisionMode);
provenance.setCompletionStatus(
it.getAttributes().get("completionStatus"));
provenance.setCompletionStatus(it.getAttributes().get("completionStatus"));
provenances.add(provenance);
});
}

@@ -120,16 +115,14 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
provenance.setId(it.getAttributes().get("id"));
provenance.setName(it.getAttributes().get("name"));
provenance.setCollectionMode("resolved");
provenance.setCompletionStatus(
it.getAttributes().get("completionStatus"));
provenance.setCompletionStatus(it.getAttributes().get("completionStatus"));
provenances.add(provenance);
});
}
parsedObject.setDlicollectedfrom(provenances);
parsedObject.setCompletionStatus(
VtdUtilityParser.getSingleValue(
ap, vn, "//*[local-name()='completionStatus']"));
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='completionStatus']"));
parsedObject.setCollectedfrom(
parsedObject.getDlicollectedfrom().stream()

@@ -148,10 +141,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
vn,
"//*[local-name()='relatedIdentifier']",
Arrays.asList(
"relatedIdentifierType",
"relationType",
"entityType",
"inverseRelationType"));
"relatedIdentifierType", "relationType", "entityType", "inverseRelationType"));
generateRelations(
relationMapper, parsedObject, result, di, dateOfCollection, relatedIdentifiers);

@@ -165,9 +155,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
.map(
it -> {
final Instance i = new Instance();
i.setUrl(
Collections.singletonList(
currentPid.getValue()));
i.setUrl(Collections.singletonList(currentPid.getValue()));
KeyValue h = new KeyValue();
i.setHostedby(h);
h.setKey(it.getAttributes().get("id"));

@@ -231,10 +219,7 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
List<StructuredProperty> subjects =
extractSubject(
VtdUtilityParser.getTextValuesWithAttributes(
ap,
vn,
"//*[local-name()='subject']",
Collections.singletonList("scheme")));
ap, vn, "//*[local-name()='subject']", Collections.singletonList("scheme")));
parsedObject.setSubject(subjects);
parsedObject.setDataInfo(di);

@@ -55,11 +55,9 @@ public class MigrateDbEntitiesApplicationTest {
assertEquals(ds.getEnglishname().getValue(), getValueAsString("englishname", fields));
assertEquals(ds.getContactemail().getValue(), getValueAsString("contactemail", fields));
assertEquals(ds.getWebsiteurl().getValue(), getValueAsString("websiteurl", fields));
assertEquals(ds.getNamespaceprefix().getValue(), getValueAsString("namespaceprefix", fields));
assertEquals(
ds.getNamespaceprefix().getValue(), getValueAsString("namespaceprefix", fields));
assertEquals(
ds.getCollectedfrom().get(0).getValue(),
getValueAsString("collectedfromname", fields));
ds.getCollectedfrom().get(0).getValue(), getValueAsString("collectedfromname", fields));
}
@Test

@@ -76,8 +74,7 @@ public class MigrateDbEntitiesApplicationTest {
assertEquals(p.getAcronym().getValue(), getValueAsString("acronym", fields));
assertEquals(p.getTitle().getValue(), getValueAsString("title", fields));
assertEquals(
p.getCollectedfrom().get(0).getValue(),
getValueAsString("collectedfromname", fields));
p.getCollectedfrom().get(0).getValue(), getValueAsString("collectedfromname", fields));
}
@Test

@@ -96,18 +93,14 @@ public class MigrateDbEntitiesApplicationTest {
assertEquals(o.getLegalshortname().getValue(), getValueAsString("legalshortname", fields));
assertEquals(o.getLegalname().getValue(), getValueAsString("legalname", fields));
assertEquals(o.getWebsiteurl().getValue(), getValueAsString("websiteurl", fields));
assertEquals(
o.getCountry().getClassid(), getValueAsString("country", fields).split("@@@")[0]);
assertEquals(o.getCountry().getClassid(), getValueAsString("country", fields).split("@@@")[0]);
assertEquals(
o.getCountry().getClassname(), getValueAsString("country", fields).split("@@@")[1]);
assertEquals(o.getCountry().getSchemeid(), getValueAsString("country", fields).split("@@@")[2]);
assertEquals(
o.getCountry().getSchemeid(), getValueAsString("country", fields).split("@@@")[2]);
o.getCountry().getSchemename(), getValueAsString("country", fields).split("@@@")[3]);
assertEquals(
o.getCountry().getSchemename(),
getValueAsString("country", fields).split("@@@")[3]);
assertEquals(
o.getCollectedfrom().get(0).getValue(),
getValueAsString("collectedfromname", fields));
o.getCollectedfrom().get(0).getValue(), getValueAsString("collectedfromname", fields));
}
@Test

@@ -201,8 +194,7 @@ public class MigrateDbEntitiesApplicationTest {
private List<TypedField> prepareMocks(final String jsonFile) throws IOException, SQLException {
final String json = IOUtils.toString(getClass().getResourceAsStream(jsonFile));
final ObjectMapper mapper = new ObjectMapper();
final List<TypedField> list =
mapper.readValue(json, new TypeReference<List<TypedField>>() {});
final List<TypedField> list = mapper.readValue(json, new TypeReference<List<TypedField>>() {});
for (final TypedField tf : list) {
if (tf.getValue() == null) {

@@ -263,8 +255,7 @@ public class MigrateDbEntitiesApplicationTest {
break;
case "string":
default:
Mockito.when(rs.getString(tf.getField()))
.thenReturn(tf.getValue().toString());
Mockito.when(rs.getString(tf.getField())).thenReturn(tf.getValue().toString());
break;
}
}

@@ -37,12 +37,8 @@ public class SparkGenerateScholix {
conf.registerKryoClasses(
new Class[] {
Scholix.class,
ScholixCollectedFrom.class,
ScholixEntityId.class,
ScholixIdentifier.class,
ScholixRelationship.class,
ScholixResource.class
Scholix.class, ScholixCollectedFrom.class, ScholixEntityId.class,
ScholixIdentifier.class, ScholixRelationship.class, ScholixResource.class
});
final String graphPath = parser.get("graphPath");

@@ -51,9 +47,7 @@ public class SparkGenerateScholix {
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
final Dataset<ScholixSummary> scholixSummary =
spark.read()
.load(workingDirPath + "/summary")
.as(Encoders.bean(ScholixSummary.class));
spark.read().load(workingDirPath + "/summary").as(Encoders.bean(ScholixSummary.class));
final Dataset<Relation> rels =
spark.read().load(graphPath + "/relation").as(Encoders.bean(Relation.class));

@@ -80,15 +74,14 @@ public class SparkGenerateScholix {
.save(workingDirPath + "/scholix_target");
Dataset<ScholixResource> target =
spark.read()
spark
.read()
.load(workingDirPath + "/scholix_target")
.as(Encoders.bean(ScholixResource.class));
scholix_final
.joinWith(
target,
scholix_final.col("identifier").equalTo(target.col("dnetIdentifier")),
"inner")
target, scholix_final.col("identifier").equalTo(target.col("dnetIdentifier")), "inner")
.map(
(MapFunction<Tuple2<Scholix, ScholixResource>, Scholix>)
f -> {

@@ -34,44 +34,35 @@ public class SparkGenerateSummary {
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
Dataset<RelatedItemInfo> rInfo =
spark.read()
spark
.read()
.load(workingDirPath + "/relatedItemCount")
.as(Encoders.bean(RelatedItemInfo.class));
Dataset<ScholixSummary> entity =
spark.createDataset(
sc.textFile(
graphPath
+ "/publication,"
+ graphPath
+ "/dataset,"
+ graphPath
+ "/unknown")
graphPath + "/publication," + graphPath + "/dataset," + graphPath + "/unknown")
.map(
s ->
ScholixSummary.fromJsonOAF(
ProvisionUtil.getItemTypeFromId(
DHPUtils.getJPathString(
jsonIDPath, s)),
ProvisionUtil.getItemTypeFromId(DHPUtils.getJPathString(jsonIDPath, s)),
s))
.rdd(),
Encoders.bean(ScholixSummary.class));
Dataset<ScholixSummary> summaryComplete =
rInfo.joinWith(entity, rInfo.col("source").equalTo(entity.col("id")))
rInfo
.joinWith(entity, rInfo.col("source").equalTo(entity.col("id")))
.map(
(MapFunction<
Tuple2<RelatedItemInfo, ScholixSummary>,
ScholixSummary>)
(MapFunction<Tuple2<RelatedItemInfo, ScholixSummary>, ScholixSummary>)
t -> {
ScholixSummary scholixSummary = t._2();
RelatedItemInfo relatedItemInfo = t._1();
scholixSummary.setRelatedDatasets(
relatedItemInfo.getRelatedDataset());
scholixSummary.setRelatedDatasets(relatedItemInfo.getRelatedDataset());
scholixSummary.setRelatedPublications(
relatedItemInfo.getRelatedPublication());
scholixSummary.setRelatedUnknown(
relatedItemInfo.getRelatedUnknown());
scholixSummary.setRelatedUnknown(relatedItemInfo.getRelatedUnknown());
return scholixSummary;
},
Encoders.bean(ScholixSummary.class));

@@ -45,7 +45,8 @@ public class SparkIndexCollectionOnES {
if ("summary".equalsIgnoreCase(type))
inputRdd =
spark.read()
spark
.read()
.load(sourcePath)
.as(Encoders.bean(ScholixSummary.class))
.map(