Adding spark.close() to avoid the "Only one SparkContext may be running in this JVM" error while running the test on Jenkins, and fixed an issue in the test

Miriam Baglioni 2021-07-13 10:33:00 +02:00
parent 4f309e625c
commit c26980f1c4
1 changed file with 3 additions and 3 deletions


@@ -61,7 +61,7 @@ class MappingORCIDToOAFTest {
     assertTrue(oA == p.count())
     println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(p.first()))
+    spark.close()
   }
@@ -78,8 +78,8 @@ class MappingORCIDToOAFTest {
     val oaf = ORCIDToOAF.convertTOOAF(orcid)
     assert(oaf.getPid.size() == 1)
     oaf.getPid.toList.foreach(pid => assert(pid.getQualifier.getClassid.equals("doi")))
-    oaf.getPid.toList.foreach(pid => assert(pid.getValue.equals("10.1042/BCJ20160876".toLowerCase())))
-    //println(mapper.writeValueAsString(ORCIDToOAF.convertTOOAF(orcid)))
+    oaf.getPid.toList.foreach(pid => assert(pid.getValue.equals("10.1042/BCJ20160876")))
+    //println(mapper.writeValueAsString(oaf))
   }
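The spark.close() added in the first hunk follows the usual life cycle for Spark-based tests that share a JVM: every test that builds a local SparkSession has to release it when it finishes, otherwise a later test creating its own context can fail with "Only one SparkContext may be running in this JVM". Below is a minimal, self-contained Scala sketch of that pattern; the object name and the toy dataset are illustrative only and are not part of this repository's code.

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical sketch: it only illustrates the create-use-close life cycle
// that the commit enforces in MappingORCIDToOAFTest.
object SparkSessionCloseSketch {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("spark-close-sketch")
      .master("local[*]")
      .getOrCreate()

    try {
      import spark.implicits._
      // Stand-in for the real test body (e.g. mapping ORCID records to OAF
      // and asserting on the resulting pids).
      val dois = Seq("10.1042/BCJ20160876").toDS()
      assert(dois.count() == 1)
    } finally {
      // close() is equivalent to stop(): it shuts down the underlying SparkContext
      // so that the next test in the same JVM can create its own session without
      // tripping the "Only one SparkContext may be running in this JVM" check.
      spark.close()
    }
  }
}
```

In a JUnit test the same spark.close() call would more often sit in a teardown method than in a finally block, but the effect is the same: the JVM-wide SparkContext is released before the next test class starts.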