[graph cleaning] avoid stack overflow error when navigating Oaf objects declaring an Enum

Claudio Atzori 2023-12-07 23:09:54 +01:00
parent 70eb1796b2
commit cb71a7936b
3 changed files with 34 additions and 0 deletions

OafCleaner.java

@@ -59,6 +59,7 @@ public class OafCleaner implements Serializable {
 	private static boolean isPrimitive(Object o) {
 		return Objects.isNull(o)
 			|| o.getClass().isPrimitive()
+			|| o.getClass().isEnum()
 			|| o instanceof Class
 			|| o instanceof Integer
 			|| o instanceof Double
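
The guard matters because OafCleaner walks the object graph reflectively, and an enum class declares static constants of its own type, so recursing into an enum value never bottoms out. Below is a minimal, illustrative sketch of the failure mode and the fix; the Navigator-style walker, the Color enum, and the field handling are assumptions for demonstration, not the project's actual code.

import java.lang.reflect.Field;

// Illustrative only: a stripped-down reflective walker, not OafCleaner itself.
public class EnumNavigationSketch {

	// Stand-in for an enum declared by an Oaf model class.
	enum Color {
		RED, GREEN
	}

	static void navigate(Object o) throws IllegalAccessException {
		if (isPrimitive(o)) {
			return;
		}
		for (Field f : o.getClass().getDeclaredFields()) {
			f.setAccessible(true);
			// Without the isEnum() guard below, navigating Color.RED reaches the static
			// constants RED and GREEN (both of type Color) and recurses on them without
			// ever terminating, ending in a StackOverflowError.
			navigate(f.get(o));
		}
	}

	static boolean isPrimitive(Object o) {
		return o == null
			|| o.getClass().isPrimitive()
			|| o.getClass().isEnum() // the guard introduced by this commit
			|| o instanceof Integer
			|| o instanceof Double
			|| o instanceof String;
	}

	public static void main(String[] args) throws Exception {
		navigate(Color.RED); // terminates only because isEnum() stops the recursion
	}
}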

CleanGraphSparkJobTest.java

@@ -690,6 +690,38 @@ public class CleanGraphSparkJobTest {
 	}
 
+	@Test
+	void testClean_ORP() throws Exception {
+		final String prefix = "gcube ";
+
+		new CleanGraphSparkJob(
+			args(
+				"/eu/dnetlib/dhp/oa/graph/input_clean_graph_parameters.json",
+				new String[] {
+					"--inputPath", graphInputPath + "/orp",
+					"--outputPath", graphOutputPath + "/orp",
+					"--isLookupUrl", "lookupurl",
+					"--graphTableClassName", OtherResearchProduct.class.getCanonicalName(),
+					"--deepClean", "true",
+					"--contextId", "sobigdata",
+					"--verifyParam", "gCube ",
+					"--masterDuplicatePath", dsMasterDuplicatePath,
+					"--country", "NL",
+					"--verifyCountryParam", "10.17632",
+					"--collectedfrom", "NARCIS",
+					"--hostedBy", Objects
+						.requireNonNull(
+							getClass()
+								.getResource("/eu/dnetlib/dhp/oa/graph/clean/hostedBy"))
+						.getPath()
+				})).run(false, isLookUpService);
+
+		Dataset<OtherResearchProduct> orp = read(spark, graphOutputPath + "/orp", OtherResearchProduct.class);
+
+		assertEquals(1, orp.count());
+	}
+
 	@Test
 	void testCleanCfHbSparkJob() throws Exception {

The diff for the third changed file is suppressed because one or more lines are too long.