forked from D-Net/dnet-hadoop
Adding spark.close() to avoid the "Only one SparkContext may be running in this JVM" error while running tests on Jenkins, and fixing a test assertion issue
parent 4f309e625c
commit c26980f1c4
@@ -61,7 +61,7 @@ class MappingORCIDToOAFTest {
     assertTrue(oA == p.count())
     println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(p.first()))

-
+    spark.close()
   }


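For context: Spark allows only one SparkContext per JVM, so a test that leaves its SparkSession open can make the next test's context creation fail with exactly the error quoted in the commit message; closing the session releases the context. Below is a minimal self-contained sketch of the same pattern, not code from this repository (the object name and test body are illustrative), using try/finally so the session is released even if an assertion fails:

import org.apache.spark.sql.SparkSession

object SparkCloseSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("spark-close-sketch")
      .getOrCreate()
    try {
      // Stand-in for the real test body that runs assertions against `spark`.
      assert(spark.range(10).count() == 10)
    } finally {
      // The call this commit adds: stops the session and frees the
      // JVM-wide SparkContext for whatever test runs next.
      spark.close()
    }
  }
}

The commit itself places spark.close() at the end of the test method rather than in a finally block; on a passing run the effect is the same.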
@@ -78,8 +78,8 @@ class MappingORCIDToOAFTest {
     val oaf = ORCIDToOAF.convertTOOAF(orcid)
     assert(oaf.getPid.size() == 1)
     oaf.getPid.toList.foreach(pid => assert(pid.getQualifier.getClassid.equals("doi")))
-    oaf.getPid.toList.foreach(pid => assert(pid.getValue.equals("10.1042/BCJ20160876")))
-    //println(mapper.writeValueAsString(ORCIDToOAF.convertTOOAF(orcid)))
+    oaf.getPid.toList.foreach(pid => assert(pid.getValue.equals("10.1042/BCJ20160876".toLowerCase())))
+    //println(mapper.writeValueAsString(oaf))


   }
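The second hunk suggests the mapper emits DOI values lower-cased (DOI names are case-insensitive by specification, so normalizing them is a common choice), which is why the expected literal gains .toLowerCase and the mixed-case assertion is dropped. A tiny sketch of that reasoning, where normalizeDoi is a hypothetical stand-in for whatever ORCIDToOAF actually does:

object DoiCaseSketch {
  // Hypothetical normalizer, not the real ORCIDToOAF logic.
  def normalizeDoi(raw: String): String = raw.trim.toLowerCase

  def main(args: Array[String]): Unit = {
    val pidValue = normalizeDoi("10.1042/BCJ20160876")
    // The fixed assertion compares against the lower-cased literal and passes:
    assert(pidValue.equals("10.1042/BCJ20160876".toLowerCase))
    // The old assertion compared against the mixed-case literal and would fail:
    assert(!pidValue.equals("10.1042/BCJ20160876"))
  }
}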