
added java doc

Miriam Baglioni 2020-11-25 14:08:09 +01:00
parent 1f130cdf92
commit b2c455f883
2 changed files with 7 additions and 3 deletions

SparkDumpFunderResults.java

@@ -25,9 +25,8 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
 import scala.Tuple2;
 /**
- * Preparation of the Project information to be added to the dumped results. For each result associated to at least one
- * Project, a serialization of an instance af ResultProject closs is done. ResultProject contains the resultId, and the
- * list of Projects (as in eu.dnetlib.dhp.schema.dump.oaf.community.Project) it is associated to
+ * Splits the dumped results by funder and stores them in a folder named as the funder nsp (for all the funders, but the EC
+ * for the EC it specifies also the fundingStream (FP7 or H2020)
  */
 public class SparkDumpFunderResults implements Serializable {
 	private static final Logger log = LoggerFactory.getLogger(SparkDumpFunderResults.class);

SparkResultLinkedToProject.java

@@ -24,6 +24,11 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import scala.Tuple2;
+/**
+ * Selects the results linked to projects. Only for these results the dump will be performed.
+ * The code to perform the dump and to expend the dumped results with the informaiton related to projects
+ * is the one used for the dump of the community products
+ */
 public class SparkResultLinkedToProject implements Serializable {
 	private static final Logger log = LoggerFactory.getLogger(SparkResultLinkedToProject.class);
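The selection documented by the added Javadoc can be sketched as a left-semi join between the results and the result-to-project relations. The paths, column names, and the "isProducedBy" relation class below are assumptions for illustration; the actual class operates on eu.dnetlib.dhp.schema.oaf.Result and Relation and reuses the community-dump code for the dump itself.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import static org.apache.spark.sql.functions.col;

public class ResultLinkedToProjectSketch {

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .appName("ResultLinkedToProjectSketch")
            .master("local[*]")
            .getOrCreate();

        // Assumed inputs: results and relations serialized as JSON, with each relation carrying
        // a source (result id), a target (project id) and a relation class.
        Dataset<Row> results = spark.read().json("/tmp/graph/result");
        Dataset<Row> relations = spark.read().json("/tmp/graph/relation");

        // Keep only the result -> project links ("isProducedBy" is an assumed relation class)
        // and reduce them to the distinct set of result ids.
        Dataset<Row> linkedIds = relations
            .filter(col("relClass").equalTo("isProducedBy"))
            .select(col("source"))
            .distinct();

        // Left-semi join: only results with at least one project relation survive,
        // and these are the ones the dump will be performed on.
        results.join(linkedIds, results.col("id").equalTo(linkedIds.col("source")), "left_semi")
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
            .json("/tmp/dump/resultsLinkedToProject");

        spark.stop();
    }
}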