forked from D-Net/dnet-hadoop
fixed issue and changed the transformation of the programme file to consider the new model
commit 969fa8d96e
parent e917281822
@@ -33,7 +33,7 @@ public class ReadProjectsFromDB implements Closeable {
 	private final BufferedWriter writer;
 	private final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

-	private final static String query = "SELECT code , optional1, optional2" +
+	private final static String query = "SELECT code , optional1, optional2 " +
 		"from projects where id like 'corda__h2020%' ";

 	public static void main(final String[] args) throws Exception {
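Why the trailing space matters: the two string literals are concatenated into one query string, so without it the SQL reads "...optional2from projects..." and is malformed (the FROM keyword disappears). A minimal standalone sketch of the difference; the class name here is illustrative, not part of the project:

// Hypothetical illustration of the concatenation issue fixed above.
public class QueryConcatSketch {
	public static void main(String[] args) {
		String broken = "SELECT code , optional1, optional2" +
			"from projects where id like 'corda__h2020%' ";
		String fixed = "SELECT code , optional1, optional2 " +
			"from projects where id like 'corda__h2020%' ";

		System.out.println(broken); // ... optional1, optional2from projects where ...
		System.out.println(fixed);  // ... optional1, optional2 from projects where ...
	}
}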
@@ -147,7 +147,7 @@ public class SparkAtomicActionJob {
 	}

 	private static void setLevels(H2020Classification h2020Classification, String classification) {
-		String[] tmp = classification.split(" | ");
+		String[] tmp = classification.split(" \\| ");
 		h2020Classification.setLevel1(tmp[0]);
 		if (tmp.length > 1) {
 			h2020Classification.setLevel2(tmp[1]);
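String.split takes a regular expression, so the unescaped | in the original call acts as alternation: the pattern " | " matches any single space and the classification string is split on every word rather than on the literal " | " separator, which means tmp[0] and tmp[1] hold single words instead of whole level labels. Escaping it as " \\| " makes the pattern match the literal delimiter. A small standalone sketch, using a made-up classification string for illustration:

import java.util.Arrays;

// Illustrates why the pipe must be escaped when splitting on a literal " | " delimiter.
public class SplitEscapeSketch {
	public static void main(String[] args) {
		String classification = "Excellent science | Future and Emerging Technologies";

		// Unescaped: " | " is the alternation of two single spaces, so it matches any space.
		String[] wrong = classification.split(" | ");
		// Escaped: the pattern matches only the literal " | " separator.
		String[] right = classification.split(" \\| ");

		System.out.println(Arrays.toString(wrong)); // [Excellent, science, |, Future, and, Emerging, Technologies]
		System.out.println(Arrays.toString(right)); // [Excellent science, Future and Emerging Technologies]
	}
}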
@@ -7,10 +7,14 @@ import java.nio.file.Path;
 import java.util.List;

 import org.apache.commons.io.IOUtils;
+import org.apache.spark.api.java.function.FilterFunction;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;

 import com.google.gson.Gson;

+import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVParser;
+import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;
+
 public class CSVParserTest {