git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/branches/data-access/sh-fuse-integration/1.0@178913 82a268e6-3cf1-43bd-a215-b396298e98cf
commit
cefc28fda5
@ -0,0 +1,38 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<classpath>
|
||||
<classpathentry kind="src" output="target/classes" path="src/main/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
|
||||
<attributes>
|
||||
<attribute name="optional" value="true"/>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
<attribute name="test" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
<attribute name="test" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
|
||||
<attributes>
|
||||
<attribute name="maven.pomderived" value="true"/>
|
||||
</attributes>
|
||||
</classpathentry>
|
||||
<classpathentry kind="output" path="target/classes"/>
|
||||
</classpath>
|
@ -0,0 +1,23 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>sh-fuse-integration</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.jdt.core.javabuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.m2e.core.maven2Builder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.eclipse.jdt.core.javanature</nature>
|
||||
<nature>org.eclipse.m2e.core.maven2Nature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
@ -0,0 +1,6 @@
|
||||
eclipse.preferences.version=1
|
||||
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
|
||||
org.eclipse.jdt.core.compiler.compliance=1.8
|
||||
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
|
||||
org.eclipse.jdt.core.compiler.release=disabled
|
||||
org.eclipse.jdt.core.compiler.source=1.8
|
@ -0,0 +1,4 @@
|
||||
activeProfiles=
|
||||
eclipse.preferences.version=1
|
||||
resolveWorkspaceProjects=true
|
||||
version=1
|
@ -0,0 +1,117 @@
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>org.gcube.data-access</groupId>
|
||||
<artifactId>sh-fuse-integration</artifactId>
|
||||
<version>1.0.0-SNAPSHOT</version>
|
||||
<name>SHFuseIntegration</name>
|
||||
|
||||
<parent>
|
||||
<artifactId>maven-parent</artifactId>
|
||||
<groupId>org.gcube.tools</groupId>
|
||||
<version>1.0.0</version>
|
||||
<relativePath />
|
||||
</parent>
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>central</id>
|
||||
<name>bintray</name>
|
||||
<url>http://jcenter.bintray.com</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.gcube.distribution</groupId>
|
||||
<artifactId>gcube-bom</artifactId>
|
||||
<version>LATEST</version>
|
||||
<type>pom</type>
|
||||
<scope>import</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.github.serceman</groupId>
|
||||
<artifactId>jnr-fuse</artifactId>
|
||||
<version>0.5.2.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>gxJRS</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>storagehub-client-library</artifactId>
|
||||
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.gcube.common</groupId>
|
||||
<artifactId>storagehub-model</artifactId>
|
||||
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.cache2k</groupId>
|
||||
<artifactId>cache2k-jcache</artifactId>
|
||||
<version>1.2.0.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.11</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
<artifactId>logback-classic</artifactId>
|
||||
<version>1.0.13</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>package</phase>
|
||||
<goals>
|
||||
<goal>single</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<archive>
|
||||
<manifest>
|
||||
<mainClass>org.gcube.data.access.storagehub.fs.StorageHubFuseLauncher
|
||||
</mainClass>
|
||||
</manifest>
|
||||
</archive>
|
||||
<descriptorRefs>
|
||||
<descriptorRef>jar-with-dependencies</descriptorRef>
|
||||
</descriptorRefs>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>2.3.2</version>
|
||||
<configuration>
|
||||
<target>1.8</target>
|
||||
<source>1.8</source>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
@ -0,0 +1,68 @@
|
||||
package org.gcube.data.access.storagehub.fs;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.concurrent.BlockingQueue;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
public class FSInputStream extends InputStream {

    // Set by close(); read() drains any remaining bytes and then returns EOF.
    // volatile because it is written by the closing thread and read by the
    // consumer thread polling the queue.
    private volatile boolean closed = false;

    // Bounded handoff buffer between the producer calling add() and the
    // consumer calling read(); capacity 20000 applies back-pressure on put().
    BlockingQueue<Byte> q = new LinkedBlockingQueue<Byte>(20000);

    // Diagnostic counters: bytes enqueued vs. bytes handed out to readers.
    public int byteRead = 0;
    public int bytegiven = 0;

    /**
     * Enqueues the whole buffer, blocking while the queue is full.
     *
     * @param buf bytes to append to the stream
     * @throws RuntimeException wrapping the {@link InterruptedException} if
     *         the producer thread is interrupted while waiting for space
     */
    protected synchronized void add(byte[] buf) {
        for (byte b : buf) {
            try {
                q.put(b);
                byteRead++;
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can still observe it.
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Returns the next byte as an unsigned int (0-255), blocking until a byte
     * is available or the stream is closed and drained.
     *
     * @return the next byte, or -1 at end of stream or on interruption
     */
    @Override
    public int read() throws IOException {
        try {
            Byte retrievedValue;
            // Poll with a timeout so we periodically re-check the closed flag
            // instead of blocking forever on an empty queue.
            do {
                retrievedValue = q.poll(2, TimeUnit.SECONDS);
            } while (retrievedValue == null && !closed);

            // The loop only exits with null when closed: the stream is drained.
            if (retrievedValue == null) {
                return -1;
            }

            int value = retrievedValue & 0xFF; // unsigned, per InputStream contract
            bytegiven++;
            return value;
        } catch (InterruptedException e) {
            // Restore the interrupt flag and report EOF to the caller.
            Thread.currentThread().interrupt();
            return -1;
        }
    }

    /** Number of bytes currently buffered and readable without blocking. */
    @Override
    public int available() throws IOException {
        return q.size();
    }

    /** Marks the stream closed; pending bytes remain readable until drained. */
    @Override
    public void close() throws IOException {
        this.closed = true;
        super.close();
    }
}
|
@ -0,0 +1,85 @@
|
||||
package org.gcube.data.access.storagehub.fs;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
import org.gcube.common.storagehub.client.dsl.FileContainer;
|
||||
import org.gcube.common.storagehub.model.items.AbstractFileItem;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import jnr.ffi.Pointer;
|
||||
import jnr.ffi.types.off_t;
|
||||
import jnr.ffi.types.size_t;
|
||||
import ru.serce.jnrfuse.ErrorCodes;
|
||||
import ru.serce.jnrfuse.struct.FileStat;
|
||||
|
||||
public class FileDownload implements SHFile{
|
||||
|
||||
public static Logger logger = LoggerFactory.getLogger(FileDownload.class);
|
||||
|
||||
InputStream stream;
|
||||
AbstractFileItem fileItem;
|
||||
|
||||
public FileDownload(FileContainer fileContainer) throws Exception {
|
||||
stream = fileContainer.download().getStream();
|
||||
fileItem = fileContainer.get();
|
||||
logger.trace("FILE-DOWNLOAD initialized with {} , {}", fileItem.getName(), fileItem.getContent().getSize());
|
||||
}
|
||||
|
||||
public synchronized int read(Pointer buf, @size_t long size, @off_t long offset) {
|
||||
logger.trace("read called with size {} and offset {} ", size, offset);
|
||||
|
||||
int bytesToRead = (int) (size);
|
||||
|
||||
byte[] mybuf = new byte[bytesToRead];
|
||||
int readTotal= 0;;
|
||||
try {
|
||||
|
||||
int read =0;
|
||||
while ((read= stream.read(mybuf, 0 , bytesToRead-readTotal))!=-1 && bytesToRead>readTotal) {
|
||||
buf.put(0, mybuf, 0, read);
|
||||
readTotal+= read;
|
||||
}
|
||||
|
||||
logger.trace("bytes to read {} and read total {} and last read {}", bytesToRead, readTotal, read);
|
||||
}catch (Exception e) {
|
||||
logger.error("error in read",e);
|
||||
try {
|
||||
stream.close();
|
||||
} catch (IOException e1) {}
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
|
||||
return readTotal;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
public synchronized int flush() {
|
||||
logger.trace("called flush");
|
||||
//logger.trace("file is ready "+mapPathUpload.get(path).toString());
|
||||
try {
|
||||
stream.close();
|
||||
} catch (IOException e1) {
|
||||
logger.error("error closing stream",e1);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
public int getAttr(FileStat stat) {
|
||||
logger.trace("is in download");
|
||||
stat.st_mode.set(FileStat.S_IFREG | 0555);
|
||||
stat.st_size.set(fileItem.getContent().getSize());
|
||||
stat.st_mtim.tv_sec.set(fileItem.getLastModificationTime().toInstant().getEpochSecond());
|
||||
stat.st_mtim.tv_nsec.set(fileItem.getLastModificationTime().toInstant().getNano());
|
||||
stat.st_ctim.tv_sec.set(fileItem.getCreationTime().toInstant().getEpochSecond());
|
||||
stat.st_ctim.tv_nsec.set(fileItem.getCreationTime().toInstant().getNano());
|
||||
stat.st_atim.tv_sec.set(fileItem.getLastModificationTime().toInstant().getEpochSecond());
|
||||
stat.st_atim.tv_nsec.set(fileItem.getLastModificationTime().toInstant().getNano());
|
||||
return 0;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,63 @@
|
||||
package org.gcube.data.access.storagehub.fs;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import jnr.ffi.Pointer;
|
||||
import ru.serce.jnrfuse.ErrorCodes;
|
||||
import ru.serce.jnrfuse.struct.FileStat;
|
||||
|
||||
public class FileUpload implements SHFile {
|
||||
|
||||
public static Logger logger = LoggerFactory.getLogger(FileUpload.class);
|
||||
|
||||
FSInputStream stream;
|
||||
|
||||
private int bytesRead =0;
|
||||
|
||||
public FileUpload(FSInputStream stream) {
|
||||
super();
|
||||
this.stream = stream;
|
||||
}
|
||||
|
||||
public synchronized int write(Pointer buf, long size, long offset) {
|
||||
logger.trace(Thread.currentThread().getName()+" ) calling write "+ size+" "+offset);
|
||||
if (stream==null) return -ErrorCodes.ENOENT();
|
||||
byte[] mybuf = new byte[(int)size];
|
||||
try {
|
||||
buf.get(0, mybuf, 0, (int)size);
|
||||
stream.add(mybuf);
|
||||
}catch (Exception e) {
|
||||
logger.error("error on download",e);
|
||||
try {
|
||||
stream.close();
|
||||
} catch (IOException e1) {}
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
bytesRead+=size;
|
||||
return (int)size;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public synchronized int flush() {
|
||||
try {
|
||||
stream.close();
|
||||
} catch (IOException e1) {
|
||||
e1.printStackTrace();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getAttr(FileStat stat) {
|
||||
stat.st_mode.set(FileStat.S_IFREG | 0555);
|
||||
stat.st_size.set(bytesRead);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
@ -0,0 +1,85 @@
|
||||
package org.gcube.data.access.storagehub.fs;
|
||||
|
||||
import java.nio.file.Paths;
|
||||
import java.util.List;
|
||||
|
||||
import org.cache2k.Cache;
|
||||
import org.gcube.common.storagehub.client.dsl.FolderContainer;
|
||||
import org.gcube.common.storagehub.client.dsl.ItemContainer;
|
||||
import org.gcube.common.storagehub.model.items.Item;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class PathUtils {
|
||||
|
||||
public static Logger logger = LoggerFactory.getLogger(PathUtils.class);
|
||||
|
||||
private Cache<String,ItemContainer<Item>> cache;
|
||||
private FolderContainer rootDirectory;
|
||||
|
||||
|
||||
public PathUtils(Cache<String, ItemContainer<Item>> cache, FolderContainer rootDirectory) {
|
||||
super();
|
||||
this.cache = cache;
|
||||
this.rootDirectory = rootDirectory;
|
||||
}
|
||||
|
||||
|
||||
|
||||
public String getLastComponent(String path) {
|
||||
while (path.substring(path.length() - 1).equals("/")) {
|
||||
path = path.substring(0, path.length() - 1);
|
||||
}
|
||||
if (path.isEmpty()) {
|
||||
return "";
|
||||
}
|
||||
return path.substring(path.lastIndexOf("/") + 1);
|
||||
}
|
||||
|
||||
public String getParentPath(String path) {
|
||||
return Paths.get(path).getParent().toString();
|
||||
}
|
||||
|
||||
public ItemContainer<? extends Item> getPath(String path) {
|
||||
|
||||
if (path.equals("/")) return rootDirectory;
|
||||
|
||||
if (cache.containsKey(path)) {
|
||||
ItemContainer<? extends Item> cached = cache.peek(path);
|
||||
logger.trace("path "+path+" retrieved in cache with id "+cached.getId());
|
||||
return cached;
|
||||
} else logger.trace("path "+path+" not in cache");
|
||||
|
||||
synchronized (this) {
|
||||
ItemContainer<? extends Item> retrievedItem = getPathRecursive(path, rootDirectory);
|
||||
if (retrievedItem!=null)cache.put(path, (ItemContainer<Item>) retrievedItem);
|
||||
return retrievedItem;
|
||||
}
|
||||
}
|
||||
|
||||
public ItemContainer<? extends Item> getPathRecursive(String path, FolderContainer parentContainer) {
|
||||
try {
|
||||
while (path.startsWith("/")) {
|
||||
path = path.substring(1);
|
||||
}
|
||||
if (!path.contains("/")) {
|
||||
logger.trace("seaching path "+path+" in "+parentContainer.get().getTitle());
|
||||
List<ItemContainer<? extends Item>> items = parentContainer.findByName(path).withContent().getContainers();
|
||||
logger.trace("found? "+(items.size()>0));
|
||||
return items.size()>0? items.get(0): null;
|
||||
}
|
||||
String nextName = path.substring(0, path.indexOf("/"));
|
||||
String rest = path.substring(path.indexOf("/"));
|
||||
|
||||
for (ItemContainer<? extends Item> container : parentContainer.findByName(nextName).withContent().getContainers()) {
|
||||
if (container instanceof FolderContainer) {
|
||||
logger.trace("seaching path "+rest+" in "+container.get().getTitle());
|
||||
return getPathRecursive(rest, (FolderContainer)container);
|
||||
}
|
||||
}
|
||||
}catch(Exception e) {
|
||||
logger.error("error in gpath recursive",e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
@ -0,0 +1,22 @@
|
||||
package org.gcube.data.access.storagehub.fs;
|
||||
|
||||
import jnr.ffi.Pointer;
|
||||
import jnr.ffi.types.off_t;
|
||||
import jnr.ffi.types.size_t;
|
||||
import ru.serce.jnrfuse.ErrorCodes;
|
||||
import ru.serce.jnrfuse.struct.FileStat;
|
||||
|
||||
/**
 * Minimal contract the FUSE layer uses to drive an open storagehub file,
 * whether it is being downloaded or uploaded. Methods follow the FUSE
 * convention of returning 0 or a positive byte count on success and a
 * negated errno (e.g. -ENOSYS) on failure.
 */
public interface SHFile {

    /**
     * Copies up to {@code size} bytes of the file, starting at {@code offset},
     * into {@code buf}.
     *
     * @return the number of bytes copied, or a negated errno on failure;
     *         -ENOSYS by default (operation not supported)
     */
    default int read(Pointer buf, @size_t long size, @off_t long offset) {
        return -ErrorCodes.ENOSYS();
    }

    /**
     * Writes {@code size} bytes from {@code buf} at {@code offset}.
     *
     * @return the number of bytes accepted, or a negated errno on failure;
     *         -ENOSYS by default (operation not supported)
     */
    default int write(Pointer buf, long size, long offset) {
        return -ErrorCodes.ENOSYS();
    }

    /** Releases resources held for the open file; returns 0 on success. */
    int flush();

    /** Fills {@code stat} with this file's attributes; returns 0 on success. */
    int getAttr(FileStat stat);
}
|
@ -0,0 +1,483 @@
|
||||
package org.gcube.data.access.storagehub.fs;
|
||||
|
||||
import java.nio.file.Paths;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.cache2k.Cache;
|
||||
import org.cache2k.Cache2kBuilder;
|
||||
import org.gcube.common.authorization.library.AuthorizedTasks;
|
||||
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.common.storagehub.client.dsl.ContainerType;
|
||||
import org.gcube.common.storagehub.client.dsl.FileContainer;
|
||||
import org.gcube.common.storagehub.client.dsl.FolderContainer;
|
||||
import org.gcube.common.storagehub.client.dsl.ItemContainer;
|
||||
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
|
||||
import org.gcube.common.storagehub.model.exceptions.StorageHubException;
|
||||
import org.gcube.common.storagehub.model.exceptions.UserNotAuthorizedException;
|
||||
import org.gcube.common.storagehub.model.items.AbstractFileItem;
|
||||
import org.gcube.common.storagehub.model.items.FolderItem;
|
||||
import org.gcube.common.storagehub.model.items.Item;
|
||||
import org.gcube.common.storagehub.model.items.SharedFolder;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import jnr.ffi.Pointer;
|
||||
import jnr.ffi.types.mode_t;
|
||||
import jnr.ffi.types.off_t;
|
||||
import jnr.ffi.types.size_t;
|
||||
import ru.serce.jnrfuse.ErrorCodes;
|
||||
import ru.serce.jnrfuse.FuseFillDir;
|
||||
import ru.serce.jnrfuse.FuseStubFS;
|
||||
import ru.serce.jnrfuse.struct.FileStat;
|
||||
import ru.serce.jnrfuse.struct.FuseFileInfo;
|
||||
|
||||
public class StorageHubFS extends FuseStubFS {
|
||||
|
||||
public static Logger logger = LoggerFactory.getLogger(StorageHubFS.class);
|
||||
|
||||
StorageHubClient client;
|
||||
|
||||
String token;
|
||||
|
||||
String scope;
|
||||
|
||||
HashMap<String, SHFile> tempFiles = new HashMap<>();
|
||||
|
||||
static final String VREFOLDERS_NAME= "VREFolders";
|
||||
|
||||
Cache<String,ItemContainer<Item>> cache;
|
||||
|
||||
PathUtils pathUtils;
|
||||
|
||||
private FolderContainer rootDirectory;
|
||||
|
||||
public StorageHubFS(String token, String scope) {
|
||||
super();
|
||||
this.token = token;
|
||||
this.scope = scope;
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
client = new StorageHubClient();
|
||||
rootDirectory = client.getWSRoot();
|
||||
cache = new Cache2kBuilder<String, ItemContainer<Item>>() {}
|
||||
.expireAfterWrite(30, TimeUnit.SECONDS)
|
||||
.resilienceDuration(30, TimeUnit.SECONDS)
|
||||
.build();
|
||||
pathUtils = new PathUtils(cache, rootDirectory);
|
||||
}
|
||||
|
||||
/*
|
||||
* fileUpload
|
||||
* @see ru.serce.jnrfuse.FuseStubFS#write(java.lang.String, jnr.ffi.Pointer, long, long, ru.serce.jnrfuse.struct.FuseFileInfo)
|
||||
*/
|
||||
@Override
|
||||
public synchronized int write(String path, Pointer buf, long size, long offset, FuseFileInfo fi) {
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
|
||||
logger.trace(Thread.currentThread().getName()+" ) calling write "+ size+" "+offset);
|
||||
SHFile file = tempFiles.get(path);
|
||||
return file.write(buf, size, offset);
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized int flush(String path, FuseFileInfo fi) {
|
||||
logger.trace("called flush for "+path);
|
||||
tempFiles.get(path).flush();
|
||||
tempFiles.remove(path);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* fileCreation
|
||||
* @see ru.serce.jnrfuse.FuseStubFS#write(java.lang.String, jnr.ffi.Pointer, long, long, ru.serce.jnrfuse.struct.FuseFileInfo)
|
||||
*/
|
||||
@Override
|
||||
public synchronized int create(final String path, @mode_t long mode, FuseFileInfo fi) {
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
|
||||
logger.trace(Thread.currentThread().getName()+" ) calling create "+path);
|
||||
if (pathUtils.getPath(path) != null) {
|
||||
return -ErrorCodes.EEXIST();
|
||||
}
|
||||
|
||||
final ItemContainer<? extends Item> parentContainer;
|
||||
|
||||
if (path.substring(1).contains("/")) {
|
||||
String parentPath = Paths.get(path).getParent().toString();
|
||||
parentContainer= pathUtils.getPath(parentPath);
|
||||
} else parentContainer = rootDirectory;
|
||||
|
||||
final FSInputStream stream = new FSInputStream();
|
||||
|
||||
FileUpload fileUpload = new FileUpload(stream);
|
||||
tempFiles.put(path, fileUpload);
|
||||
new Thread(AuthorizedTasks.bind(new Runnable() {
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
((FolderContainer) parentContainer).uploadFile(stream, pathUtils.getLastComponent(path), "");
|
||||
}catch(Throwable t) {
|
||||
t.printStackTrace();
|
||||
tempFiles.get(path).flush();
|
||||
}
|
||||
}
|
||||
})).start();
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public int getattr(String path, FileStat stat) {
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
|
||||
logger.trace(Thread.currentThread().getName()+" ) calling getattr "+path);
|
||||
if (Objects.equals(path, "/") || path.contains("Trash") || path.equals("/"+VREFOLDERS_NAME)) {
|
||||
stat.st_mode.set(FileStat.S_IFDIR | 0755);
|
||||
stat.st_nlink.set(2);
|
||||
} else if(pathUtils.getLastComponent(path).startsWith(".")) {
|
||||
logger.trace("start with /.");
|
||||
return super.getattr(path, stat);
|
||||
} else if (tempFiles.containsKey(path)){
|
||||
return tempFiles.get(path).getAttr(stat);
|
||||
}else {
|
||||
logger.trace("trying items");
|
||||
ItemContainer<? extends Item> container = pathUtils.getPath(path);
|
||||
logger.trace("item for path "+path+" is null ? "+(container==null));
|
||||
if (container==null) {
|
||||
return -ErrorCodes.ENOENT();
|
||||
}else
|
||||
try{
|
||||
getAttrSHItem(container, stat);
|
||||
}catch (Throwable e) {
|
||||
logger.error("error gettign attributes ",e);
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
private void getAttrSHItem(ItemContainer<? extends Item> container, FileStat stat) throws IllegalArgumentException{
|
||||
if (container.getType()==ContainerType.FILE) {
|
||||
|
||||
AbstractFileItem fileItem = ((AbstractFileItem)container.get());
|
||||
stat.st_size.set(fileItem.getContent().getSize());
|
||||
setCommonAttributes(fileItem, stat, FileStat.S_IFREG);
|
||||
logger.trace("fileContent is "+fileItem.getContent().getSize());
|
||||
|
||||
|
||||
} else if (container.getType()==ContainerType.FOLDER) {
|
||||
FolderItem folderItem = ((FolderItem)container.get());
|
||||
stat.st_size.set(4096);
|
||||
setCommonAttributes(folderItem, stat, FileStat.S_IFDIR);
|
||||
} else throw new IllegalArgumentException("container type not valid");
|
||||
}
|
||||
|
||||
|
||||
private void setCommonAttributes(Item item, FileStat stat, int type) {
|
||||
if (item.isShared()) {
|
||||
stat.st_mode.set(type | FileStat.S_IROTH);
|
||||
}else {
|
||||
stat.st_mode.set(type | 0755);
|
||||
}
|
||||
stat.st_mtim.tv_sec.set(item.getLastModificationTime().toInstant().getEpochSecond());
|
||||
stat.st_mtim.tv_nsec.set(item.getLastModificationTime().toInstant().getNano());
|
||||
stat.st_ctim.tv_sec.set(item.getCreationTime().toInstant().getEpochSecond());
|
||||
stat.st_ctim.tv_nsec.set(item.getCreationTime().toInstant().getNano());
|
||||
stat.st_atim.tv_sec.set(item.getLastModificationTime().toInstant().getEpochSecond());
|
||||
stat.st_atim.tv_nsec.set(item.getLastModificationTime().toInstant().getNano());
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public int mkdir(String path, @mode_t long mode) {
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
|
||||
logger.trace(Thread.currentThread().getName()+" ) calling mkdir");
|
||||
if (pathUtils.getPath(path) != null) {
|
||||
return -ErrorCodes.EEXIST();
|
||||
}
|
||||
|
||||
ItemContainer<? extends Item> parentContainer;
|
||||
|
||||
if (path.substring(1).contains("/")) {
|
||||
String parentPath = Paths.get(path).getParent().toString();
|
||||
parentContainer= pathUtils.getPath(parentPath);
|
||||
} else parentContainer = rootDirectory;
|
||||
|
||||
FolderContainer parentDir = (FolderContainer) parentContainer;
|
||||
String dirName= pathUtils.getLastComponent(path);
|
||||
try {
|
||||
parentDir.newFolder(dirName,dirName );
|
||||
return 0;
|
||||
} catch (Exception e) {
|
||||
logger.error("error in mkdir",e);
|
||||
return -ErrorCodes.ENOENT();
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* fileDownload
|
||||
* @see ru.serce.jnrfuse.FuseStubFS#write(java.lang.String, jnr.ffi.Pointer, long, long, ru.serce.jnrfuse.struct.FuseFileInfo)
|
||||
*/
|
||||
@Override
|
||||
public int read(String path, Pointer buf, @size_t long size, @off_t long offset, FuseFileInfo fi) {
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
|
||||
logger.trace("!!! read called in path {} with size {} and offset {} ",path, size, offset);
|
||||
|
||||
SHFile fileDownload;
|
||||
if (tempFiles.containsKey(path)) {
|
||||
fileDownload = tempFiles.get(path);
|
||||
} else {
|
||||
ItemContainer<? extends Item> item = pathUtils.getPath(path);
|
||||
if (item == null) {
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
if (item.getType()!=ContainerType.FILE) {
|
||||
return -ErrorCodes.EISDIR();
|
||||
}
|
||||
|
||||
try {
|
||||
fileDownload = new FileDownload((FileContainer)item);
|
||||
} catch (Exception e) {
|
||||
logger.error("error reading remote file",e);
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
|
||||
tempFiles.put(path, fileDownload);
|
||||
}
|
||||
|
||||
return fileDownload.read(buf, size, offset);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* list dir
|
||||
* @see ru.serce.jnrfuse.FuseStubFS#write(java.lang.String, jnr.ffi.Pointer, long, long, ru.serce.jnrfuse.struct.FuseFileInfo)
|
||||
*/
|
||||
@Override
|
||||
public int readdir(String path, Pointer buf, FuseFillDir filter, @off_t long offset, FuseFileInfo fi) {
|
||||
logger.trace("readdir called");
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
|
||||
logger.trace(Thread.currentThread().getName()+" ) calling readdir "+path);
|
||||
if (path.contains(".Trash")) return 0;
|
||||
|
||||
List<ItemContainer <? extends Item>> containers;
|
||||
|
||||
if (path.equals("/"+VREFOLDERS_NAME)) {
|
||||
try {
|
||||
containers= client.getVREFolders().getContainers();
|
||||
}catch(StorageHubException she) {
|
||||
logger.error("error reading dir",she);
|
||||
return -ErrorCodes.EACCES();
|
||||
}
|
||||
}else {
|
||||
|
||||
ItemContainer<? extends Item> container = pathUtils.getPath(path);
|
||||
if (container == null) {
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
if (!(container.getType()==ContainerType.FOLDER)) {
|
||||
return -ErrorCodes.ENOTDIR();
|
||||
}
|
||||
try {
|
||||
logger.trace("reading folder "+path);
|
||||
containers = ((FolderContainer)container).list().withContent().getContainers();
|
||||
logger.trace("folder read "+path);
|
||||
}catch(UserNotAuthorizedException una) {
|
||||
logger.error("folder error ",una);
|
||||
return -ErrorCodes.EACCES();
|
||||
}catch(StorageHubException she) {
|
||||
logger.error("folder error ",she);
|
||||
return -ErrorCodes.EREMOTEIO();
|
||||
}catch(Throwable t) {
|
||||
logger.error("folder error ",t);
|
||||
throw new RuntimeException(t);
|
||||
}
|
||||
}
|
||||
filter.apply(buf, ".", null, 0);
|
||||
filter.apply(buf, "..", null, 0);
|
||||
|
||||
for (ItemContainer <? extends Item> child : containers ) {
|
||||
try {
|
||||
Item it = child.get();
|
||||
filter.apply(buf, it.getTitle(), null, 0);
|
||||
if (path.charAt(path.length() - 1)!='/')
|
||||
path+="/";
|
||||
cache.put(path+it.getTitle(), (ItemContainer<Item>) child);
|
||||
|
||||
}catch (Exception e) {
|
||||
logger.error("error riding children ",e);
|
||||
}
|
||||
}
|
||||
|
||||
logger.trace("tempFiles.entrySet() is empty ? {}",(tempFiles.entrySet().isEmpty()));
|
||||
|
||||
for(Entry<String, SHFile> entry: tempFiles.entrySet()) {
|
||||
logger.trace("entry in temp map {}", entry.getKey());
|
||||
if (entry.getValue() instanceof FileUpload || pathUtils.getParentPath(entry.getKey()).equals(path)) {
|
||||
filter.apply(buf, pathUtils.getLastComponent(entry.getKey()), null, 0);
|
||||
logger.trace("last temp entry added {}", entry.getKey());
|
||||
}
|
||||
}
|
||||
|
||||
if (path.equals("/")) filter.apply(buf, VREFOLDERS_NAME , null, 0);
|
||||
|
||||
return 0;
|
||||
|
||||
}
|
||||
|
||||
|
||||
/* @Override
|
||||
public int statfs(String path, Statvfs stbuf) {
|
||||
if (Platform.getNativePlatform().getOS() == WINDOWS) {
|
||||
// statfs needs to be implemented on Windows in order to allow for copying
|
||||
// data from other devices because winfsp calculates the volume size based
|
||||
// on the statvfs call.
|
||||
// see https://github.com/billziss-gh/winfsp/blob/14e6b402fe3360fdebcc78868de8df27622b565f/src/dll/fuse/fuse_intf.c#L654
|
||||
if ("/".equals(path)) {
|
||||
stbuf.f_blocks.set(1024 * 1024); // total data blocks in file system
|
||||
stbuf.f_frsize.set(1024); // fs block size
|
||||
stbuf.f_bfree.set(1024 * 1024); // free blocks in fs
|
||||
}
|
||||
}
|
||||
return super.statfs(path, stbuf);
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
@Override
|
||||
public int rename(String path, String newName) {
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
|
||||
ItemContainer<? extends Item> folder = pathUtils.getPath(path);
|
||||
if (folder == null) {
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
ItemContainer<? extends Item> newParent = pathUtils.getPath(pathUtils.getParentPath(newName));
|
||||
if (newParent == null) {
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
if (newParent.getType()!=ContainerType.FOLDER) {
|
||||
return -ErrorCodes.ENOTDIR();
|
||||
}
|
||||
|
||||
try {
|
||||
if (newParent.getId()!=folder.get().getParentId()) {
|
||||
folder.move((FolderContainer)newParent);
|
||||
}
|
||||
|
||||
if (!pathUtils.getLastComponent(newName).equals(pathUtils.getLastComponent(path)))
|
||||
folder.rename(pathUtils.getLastComponent(newName));
|
||||
cache.remove(path);
|
||||
}catch(UserNotAuthorizedException una) {
|
||||
return -ErrorCodes.EACCES();
|
||||
}catch(StorageHubException she) {
|
||||
return -ErrorCodes.EREMOTEIO();
|
||||
}
|
||||
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int rmdir(String path) {
|
||||
if (path.equals("/"+VREFOLDERS_NAME))
|
||||
return -ErrorCodes.EACCES();
|
||||
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
|
||||
ItemContainer<? extends Item> folder = pathUtils.getPath(path);
|
||||
if (folder == null) {
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
if (folder.getType()!=ContainerType.FOLDER) {
|
||||
return -ErrorCodes.ENOTDIR();
|
||||
}
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
try {
|
||||
checkSpecialFolderRemove(path);
|
||||
|
||||
if (folder.get() instanceof SharedFolder && ((SharedFolder) folder.get()).isVreFolder())
|
||||
return -ErrorCodes.EACCES();
|
||||
|
||||
folder.delete();
|
||||
cache.remove(path);
|
||||
}catch(UserNotAuthorizedException una) {
|
||||
return -ErrorCodes.EACCES();
|
||||
}catch(StorageHubException she) {
|
||||
return -ErrorCodes.EREMOTEIO();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
public void checkSpecialFolderRemove(String path) throws UserNotAuthorizedException{
|
||||
if (path.equals(String.format("/%s", VREFOLDERS_NAME))) throw new UserNotAuthorizedException(VREFOLDERS_NAME+" cannot be deleted");
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* delete file
|
||||
* @see ru.serce.jnrfuse.FuseStubFS#write(java.lang.String, jnr.ffi.Pointer, long, long, ru.serce.jnrfuse.struct.FuseFileInfo)
|
||||
*/
|
||||
@Override
|
||||
public int unlink(String path) {
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
|
||||
ItemContainer<? extends Item> file = pathUtils.getPath(path);
|
||||
if (file == null) {
|
||||
return -ErrorCodes.ENOENT();
|
||||
}
|
||||
if (file.getType()!=ContainerType.FILE) {
|
||||
return -ErrorCodes.EISDIR();
|
||||
}
|
||||
ScopeProvider.instance.set(scope);
|
||||
SecurityTokenProvider.instance.set(token);
|
||||
try {
|
||||
file.delete();
|
||||
cache.remove(path);
|
||||
}catch(UserNotAuthorizedException una) {
|
||||
return -ErrorCodes.EACCES();
|
||||
}catch(StorageHubException she) {
|
||||
return -ErrorCodes.EREMOTEIO();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
// FUSE open hook. No per-file-handle state is kept here, so every open
// succeeds; actual data access happens in the read/write callbacks.
// NOTE(review): logging at INFO for every open is noisy — consider trace/debug.
@Override
public int open(String path, FuseFileInfo fi) {
	logger.info("open called");
	return 0;
}
|
||||
|
||||
/*
|
||||
@Override
|
||||
public int access(String path, int mask) {
|
||||
logger.trace("access function called "+path+" "+mask);
|
||||
return super.access(path, mask);
|
||||
}
|
||||
*/
|
||||
}
|
@ -0,0 +1,17 @@
|
||||
package org.gcube.data.access.storagehub.fs;
|
||||
|
||||
import java.nio.file.Paths;
|
||||
|
||||
public class StorageHubFuseLauncher {
|
||||
|
||||
|
||||
public static void main(String ...args) {
|
||||
String token = args[0];
|
||||
String scope = args[1];
|
||||
String path = args[2];
|
||||
|
||||
StorageHubFS shFS= new StorageHubFS(token, scope);
|
||||
shFS.mount(Paths.get(path), true, true);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,30 @@
|
||||
package org.gcube.data.access.storagehub.fuse;
|
||||
|
||||
import java.nio.file.Paths;
|
||||
|
||||
import org.gcube.data.access.storagehub.fs.StorageHubFS;
|
||||
import org.junit.Test;
|
||||
|
||||
import jnr.ffi.Platform;
|
||||
|
||||
// Manual integration test: mounts a real StorageHub workspace via FUSE.
// NOTE(review): this is not a unit test — it needs a live infrastructure,
// a local FUSE/winfsp install, and blocks on mount(); it should be excluded
// from automated builds (e.g. @Ignore or a surefire exclusion).
public class FuseTest {

	@Test
	public void mount() {
		// NOTE(review): hard-coded user token is a credential committed to the
		// repository — it should be revoked and read from an env var instead.
		StorageHubFS memfs = new StorageHubFS("b7c80297-e4ed-42ab-ab42-fdc0b8b0eabf-98187548","/gcube");
		try {
			// Pick a mount point per OS; both values are developer-machine
			// specific, so this test is not portable as written.
			String path;
			switch (Platform.getNativePlatform().getOS()) {
			case WINDOWS:
				path = "J:\\";
				break;
			default:
				path = "/home/lucio/javaMount/mnt1";
			}
			// blocking=true: this call does not return until the fs is unmounted.
			memfs.mount(Paths.get(path), true, true);
		} finally {
			// Always detach the mount, even if mount() fails part-way.
			memfs.umount();
		}
	}

}
|
@ -0,0 +1,17 @@
|
||||
<configuration>
|
||||
|
||||
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>
|
||||
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{0}: %msg%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
|
||||
|
||||
<logger name="org.gcube.data.access.storagehub.fs" level="TRACE" />
|
||||
<logger name="org.gcube" level="WARN" />
|
||||
|
||||
<root level="WARN">
|
||||
<appender-ref ref="STDOUT" />
|
||||
</root>
|
||||
|
||||
</configuration>
|
Loading…
Reference in New Issue