changed to mirror the new Zenodo API interaction
This commit is contained in:
parent
ec1dac5847
commit
a25e8071c5
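Both hunks below adapt call sites to the revised helper signatures: MakeTarArchive.tarMaxSize now takes an additional trailing boolean, and ZenodoAPIClient.uploadIS no longer receives the file length from the caller. A minimal sketch of the new client-side interaction, assuming the ZenodoAPIClient methods used elsewhere in this module (newVersion, newDeposition, sendMretadata, publish) keep the signatures shown in the test file removed by this commit; the path, token and concept record id are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import eu.dnetlib.dhp.common.api.ZenodoAPIClient;

public class ZenodoInteractionSketch {

	public static void main(String[] args) throws Exception {
		FileSystem fileSystem = FileSystem.get(new Configuration());
		ZenodoAPIClient zenodoApiClient = new ZenodoAPIClient(
			"https://sandbox.zenodo.org/api/deposit/depositions", "<ACCESS_TOKEN>");

		// open a new version of an existing deposition (use newDeposition() for a brand new record)
		zenodoApiClient.newVersion("<conceptRecordId>"); // placeholder id

		Path p = new Path("/tmp/dump/part-00000"); // placeholder HDFS path
		try (FSDataInputStream inputStream = fileSystem.open(p)) {
			// updated signature: the content length is no longer passed explicitly
			zenodoApiClient.uploadIS(inputStream, p.getName());
		}

		zenodoApiClient.sendMretadata("<deposition metadata json>"); // placeholder payload
		zenodoApiClient.publish();
	}
}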
@@ -66,7 +66,7 @@ public class MakeTar implements Serializable {
 			String pathString = p.toString();
 			String entity = pathString.substring(pathString.lastIndexOf("/") + 1);
 
-			MakeTarArchive.tarMaxSize(fileSystem, pathString, outputPath + "/" + entity, entity, gBperSplit);
+			MakeTarArchive.tarMaxSize(fileSystem, pathString, outputPath + "/" + entity, entity, gBperSplit, true);
 		}
 
 	}
@@ -83,7 +83,7 @@ public class SendToZenodoHDFS implements Serializable {
 				String name = pString.substring(pString.lastIndexOf("/") + 1);
 
 				FSDataInputStream inputStream = fileSystem.open(p);
-				zenodoApiClient.uploadIS(inputStream, name, fileStatus.getLen());
+				zenodoApiClient.uploadIS(inputStream, name);
 
 			}
 
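For reference, a sketch of streaming every file under an HDFS directory to the deposition with the two-argument uploadIS; the recursive listFiles loop mirrors the one in the removed test below, and the class and method names here are illustrative, not part of this commit:

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import eu.dnetlib.dhp.common.api.ZenodoAPIClient;

public class UploadDirectorySketch {

	public static void uploadAll(FileSystem fileSystem, String sourcePath, ZenodoAPIClient zenodoApiClient)
		throws Exception {
		// the boolean flag requests a recursive listing
		RemoteIterator<LocatedFileStatus> it = fileSystem.listFiles(new Path(sourcePath), true);
		while (it.hasNext()) {
			LocatedFileStatus fileStatus = it.next();
			String pString = fileStatus.getPath().toString();
			String name = pString.substring(pString.lastIndexOf("/") + 1);
			try (FSDataInputStream inputStream = fileSystem.open(fileStatus.getPath())) {
				// updated call: the file length is no longer passed by the caller
				zenodoApiClient.uploadIS(inputStream, name);
			}
		}
	}
}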
@@ -1,154 +0,0 @@
package eu.dnetlib.dhp.oa.graph.dump;

import java.io.IOException;
import java.nio.file.Files;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import com.google.gson.Gson;

import eu.dnetlib.dhp.common.api.MissingConceptDoiException;
import eu.dnetlib.dhp.common.api.ZenodoAPIClient;
import eu.dnetlib.dhp.oa.graph.dump.eosc.CommunityMap;
import eu.dnetlib.dhp.oa.graph.dump.eosc.Utils;

@Disabled
public class ZenodoUploadTest {

	private static String workingDir;

	private final String URL_STRING = "https://sandbox.zenodo.org/api/deposit/depositions";
	private final String ACCESS_TOKEN = "";

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files
			.createTempDirectory(UpdateProjectInfoTest.class.getSimpleName())
			.toString();
	}

	@Test
	void testNewDeposition() throws IOException {
		CommunityMap communityMap = new CommunityMap();
		communityMap.put("ni", "Neuroinformatics");
		communityMap.put("dh-ch", "Digital Humanities and Cultural Heritage");
		LocalFileSystem fs = FileSystem.getLocal(new Configuration());

		fs
			.copyFromLocalFile(
				false, new Path(getClass()
					.getResource("/eu/dnetlib/dhp/oa/graph/dump/zenodo/ni")
					.getPath()),
				new Path(workingDir + "/zenodo/ni/ni"));
		fs
			.copyFromLocalFile(
				false, new Path(getClass()
					.getResource("/eu/dnetlib/dhp/oa/graph/dump/zenodo/dh-ch")
					.getPath()),
				new Path(workingDir + "/zenodo/dh-ch/dh-ch"));

		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
			ACCESS_TOKEN);
		client.newDeposition();

		// the second boolean parameter here sets the recursion to true
		RemoteIterator<LocatedFileStatus> fileStatusListIterator = fs
			.listFiles(
				new Path(workingDir + "/zenodo"), true);
		while (fileStatusListIterator.hasNext()) {
			LocatedFileStatus fileStatus = fileStatusListIterator.next();

			String p_string = fileStatus.getPath().toString();

			int index = p_string.lastIndexOf("/");
			String community = p_string.substring(0, index);
			community = community.substring(community.lastIndexOf("/") + 1);
			String community_name = communityMap.get(community).replace(" ", "_");
			// fs.copyToLocalFile(fileStatus.getPath(), new Path("/tmp/" + community_name));
			System.out.println(community);

			// File f = new File("/tmp/" + community_name);
			FSDataInputStream inputStream = fs.open(fileStatus.getPath());
			System.out.println(client.uploadIS(inputStream, community_name, fileStatus.getLen()));

		}

		String metadata = "{\"metadata\":{\"access_right\":\"open\",\"communities\":[{\"identifier\":\"openaire-research-graph\"}],\"creators\":[{\"affiliation\":\"CNR - ISTI\",\"name\":\"Manghi, Paolo\",\"orcid\":\"0000-0001-7291-3210\"},{\"affiliation\":\"CNR - ISTI\",\"name\":\"Atzori, Claudio\",\"orcid\":\"0000-0001-9613-6639\"},{\"affiliation\":\"CNR - ISTI\",\"name\":\"Bardi, Alessia\",\"orcid\":\"0000-0002-1112-1292\"},{\"affiliation\":\"ISTI - CNR\",\"name\":\"Baglioni, Miriam\",\"orcid\":\"0000-0002-2273-9004\"},{\"affiliation\":\"University of Bielefeld\",\"name\":\"Shirrwagen, Jochen\"},{\"affiliation\":\"Athena Research and Innovation Centre\",\"name\":\"Dimitropoulos, Harry\"},{\"affiliation\":\"CNR - ISTI\",\"name\":\"La Bruzzo, Sandro\",\"orcid\":\"0000-0003-2855-1245\"},{\"affiliation\":\"Athena Research and Innovation Centre\",\"name\":\"Foufoulas, Ioannis\"},{\"affiliation\":\"University of Bielefeld\",\"name\":\"Löhden, Aenne\"},{\"affiliation\":\"University of Bielefeld\",\"name\":\"Bäcker, Amelie\",\"orcid\":\"0000-0001-6015-2063\"},{\"affiliation\":\"CNR - ISTI\",\"name\":\"Mannocci, Andrea\",\"orcid\":\"0000-0002-5193-7851\"},{\"affiliation\":\"University of Warsaw\",\"name\":\"Horst, Marek\"},{\"affiliation\":\"University of Bielefeld\",\"name\":\"Czerniak, Andreas\",\"orcid\":\"0000-0003-3883-4169\"},{\"affiliation\":\"Athena Research and Innovation Centre\",\"name\":\"Kiatropoulou, Katerina\"},{\"affiliation\":\"Athena Research and Innovation Centre\",\"name\":\"Kokogiannaki, Argiro\",\"orcid\":\"0000-0002-3880-0244\"},{\"affiliation\":\"CNR - ISTI\",\"name\":\"De Bonis, Michele\"},{\"affiliation\":\"CNR - ISTI\",\"name\":\"Artini, Michele\"},{\"affiliation\":\"CNR - ISTI\",\"name\":\"Ottonello, Enrico\"},{\"affiliation\":\"Athena Research and Innovation Centre\",\"name\":\"Lempesis, Antonis\"},{\"affiliation\":\"CERN\",\"name\":\"Ioannidis, Alexandros\"},{\"affiliation\":\"University of Bielefeld\",\"name\":\"Summan, Friedrich\"}],\"description\":\"\\u003cp\\u003eThis dataset contains dumps of the OpenAIRE Research Graph containing metadata records relevant for the research communities and initiatives collaborating with OpenAIRE\\u003c/p\\u003e. \\u003cp\\u003eEach dataset is a zip containing a file with one json per line. Each json is compliant to the schema available at XXXX\\u003c/p\\u003e Note that the file that is offered is not a typical json file: each line contains a separate, self-contained json object. For more information please see http://jsonlines.org\",\"grants\":[{\"id\":\"777541\"},{\"id\":\"824091\"},{\"id\":\"824323\"}],\"keywords\":[\"Open Science\",\"Scholarly Communication\",\"Information Science\"],\"language\":\"eng\",\"license\":\"CC-BY-4.0\",\"title\":\"OpenAIRE Research Graph: Dumps for research communities and initiatives.\",\"upload_type\":\"dataset\",\"version\":\"1.0\"}}";

		System.out.println(client.sendMretadata(metadata));

		System.out.println(client.publish());

	}

	@Test
	void testNewVersion() throws IOException, MissingConceptDoiException {

		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
			ACCESS_TOKEN);

		client.newVersion("656628");

		CommunityMap communityMap = new CommunityMap();
		communityMap.put("ni", "Neuroinformatics");
		communityMap.put("dh-ch", "Digital Humanities and Cultural Heritage");
		LocalFileSystem fs = FileSystem.getLocal(new Configuration());

		fs
			.copyFromLocalFile(
				false, new Path(getClass()
					.getResource("/eu/dnetlib/dhp/oa/graph/dump/zenodo/ni")
					.getPath()),
				new Path(workingDir + "/zenodo/ni/ni"));
		fs
			.copyFromLocalFile(
				false, new Path(getClass()
					.getResource("/eu/dnetlib/dhp/oa/graph/dump/zenodo/dh-ch")
					.getPath()),
				new Path(workingDir + "/zenodo/dh-ch/dh-ch"));

		RemoteIterator<LocatedFileStatus> fileStatusListIterator = fs
			.listFiles(
				new Path(workingDir + "/zenodo"), true);
		while (fileStatusListIterator.hasNext()) {
			LocatedFileStatus fileStatus = fileStatusListIterator.next();

			String p_string = fileStatus.getPath().toString();

			int index = p_string.lastIndexOf("/");
			String community = p_string.substring(0, index);
			community = community.substring(community.lastIndexOf("/") + 1);
			String community_name = communityMap.get(community).replace(" ", "_");
			// fs.copyToLocalFile(fileStatus.getPath(), new Path("/tmp/" + community_name));
			System.out.println(community);

			// File f = new File("/tmp/" + community_name);
			FSDataInputStream inputStream = fs.open(fileStatus.getPath());
			System.out.println(client.uploadIS(inputStream, community_name, fileStatus.getLen()));

		}

		System.out.println(client.publish());

	}

	@Test
	void readCommunityMap() throws IOException {
		LocalFileSystem fs = FileSystem.getLocal(new Configuration());
		System.out
			.println(
				new Gson()
					.toJson(
						Utils
							.readCommunityMap(
								fs, getClass()
									.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
									.getPath())));
	}

}
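The removed test hard-codes the deposition metadata as one escaped JSON string; a sketch of building an equivalent, abridged payload with the Gson dependency the test already imports, before handing it to sendMretadata. The field names and values are taken from the string above; the helper class itself is illustrative:

import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;

public class DepositionMetadataSketch {

	public static String buildMetadata() {
		JsonObject metadata = new JsonObject();
		metadata.addProperty("access_right", "open");
		metadata.addProperty("upload_type", "dataset");
		metadata.addProperty("language", "eng");
		metadata.addProperty("license", "CC-BY-4.0");
		metadata.addProperty("version", "1.0");
		metadata.addProperty("title", "OpenAIRE Research Graph: Dumps for research communities and initiatives.");

		// "communities" is an array of {"identifier": ...} objects
		JsonObject community = new JsonObject();
		community.addProperty("identifier", "openaire-research-graph");
		JsonArray communities = new JsonArray();
		communities.add(community);
		metadata.add("communities", communities);

		// the payload wraps everything under a top-level "metadata" key
		JsonObject root = new JsonObject();
		root.add("metadata", metadata);
		return new Gson().toJson(root);
	}
}

The resulting string can then be passed to sendMretadata in place of the literal used in the test.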