1
0
Fork 0

Hosted By Map - test modified because of the application of the new aggregator on datasources

This commit is contained in:
Miriam Baglioni 2021-08-04 10:19:17 +02:00
parent e94ae0b1de
commit eb8c3f8594
1 changed file with 2 additions and 2 deletions

View File

@@ -98,7 +98,7 @@ class TestApply extends java.io.Serializable{
val dats_ds :Dataset[Datasource] = spark.read.textFile(dats).map(p => mapper.readValue(p, classOf[Datasource])) val dats_ds :Dataset[Datasource] = spark.read.textFile(dats).map(p => mapper.readValue(p, classOf[Datasource]))
val hbm_ds :Dataset[EntityInfo] = spark.read.textFile(hbm).map(p => mapper.readValue(p, classOf[EntityInfo])) val hbm_ds :Dataset[EntityInfo] = Aggregators.datasourceToSingleId(spark.read.textFile(hbm).map(p => mapper.readValue(p, classOf[EntityInfo])))
assertEquals(10, dats_ds.count()) assertEquals(10, dats_ds.count())
@@ -122,7 +122,7 @@ class TestApply extends java.io.Serializable{
}else{ }else{
assertTrue(pa.getOpenairecompatibility().getClassid.equals(pb.getOpenairecompatibility.getClassid)) assertTrue(pa.getOpenairecompatibility().getClassid.equals(pb.getOpenairecompatibility.getClassid))
assertTrue(pa.getOpenairecompatibility().getClassname.equals(pb.getOpenairecompatibility.getClassid)) assertTrue(pa.getOpenairecompatibility().getClassname.equals(pb.getOpenairecompatibility.getClassname))
} }
}) })