Compare commits
74 Commits
Author | SHA1 | Date |
---|---|---|
Luca Frosini | e42b2bde0a | |
Luca Frosini | 4ffaf4b082 | |
Luca Frosini | 984a1e5497 | |
Luca Frosini | 47092cf13e | |
Luca Frosini | 7b1631e8be | |
Luca Frosini | 8b49c4ffd2 | |
Luca Frosini | 8f701c10a5 | |
Luca Frosini | 8e4f173917 | |
Luca Frosini | f85fb9948f | |
Luca Frosini | b27a2695cf | |
Luca Frosini | 1da984cac0 | |
Luca Frosini | 6080c0ee41 | |
Luca Frosini | 7865bd81cc | |
Luca Frosini | c6049a122b | |
Luca Frosini | 1e31a71fce | |
Luca Frosini | 0917263f27 | |
luca.frosini | 30e8c84258 | |
luca.frosini | c324b3ab41 | |
luca.frosini | c5531d7c54 | |
luca.frosini | 8af757f3db | |
luca.frosini | 0812a0c94c | |
luca.frosini | 78ea9c2307 | |
luca.frosini | 6657543a63 | |
luca.frosini | 6a1e0e5838 | |
luca.frosini | 5fe0e5771b | |
luca.frosini | 9450134790 | |
luca.frosini | f2cf0a4f17 | |
luca.frosini | 24f2ec53c1 | |
luca.frosini | d2b797c19a | |
luca.frosini | 46cb4f0975 | |
luca.frosini | 816c7693dc | |
luca.frosini | ffd20403da | |
luca.frosini | 2dba63771b | |
luca.frosini | e0bfef4c49 | |
luca.frosini | 2d863b99fc | |
luca.frosini | 084e223669 | |
luca.frosini | e8a98dd54a | |
luca.frosini | 0878cdd7bd | |
luca.frosini | dd7ad6840e | |
luca.frosini | ca51189cab | |
luca.frosini | dfccd87a62 | |
luca.frosini | 00e0fc2a55 | |
luca.frosini | 996997804a | |
luca.frosini | 7ce298be17 | |
luca.frosini | b978185cd5 | |
luca.frosini | 68c742eb09 | |
luca.frosini | 7409a8d278 | |
luca.frosini | c611a839eb | |
luca.frosini | 1eb1cf6773 | |
luca.frosini | d786ffff98 | |
luca.frosini | 08fca98875 | |
luca.frosini | e5106cd6c0 | |
luca.frosini | 849cc923f3 | |
luca.frosini | 9851f83ecd | |
luca.frosini | 9b3ffcf5b4 | |
luca.frosini | c34c5f9a59 | |
luca.frosini | 22bedb9a75 | |
luca.frosini | 3f1a4f5795 | |
luca.frosini | 5741f52072 | |
luca.frosini | 290cb44221 | |
luca.frosini | 90e1ca1a28 | |
luca.frosini | af48246afa | |
luca.frosini | ee8bdefa8b | |
luca.frosini | 8c5dcd0341 | |
luca.frosini | 9d9ec44b90 | |
luca.frosini | 38668d8a21 | |
luca.frosini | 31f0120c50 | |
luca.frosini | 01716b55cd | |
luca.frosini | d650c16e29 | |
luca.frosini | d2ca74cc72 | |
luca.frosini | 95b86d8a40 | |
luca.frosini | 88c1ee60d8 | |
luca.frosini | 71b780eb2a | |
luca.frosini | 0cb336ded4 |
|
@ -2,6 +2,12 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
|
|||
|
||||
# Changelog for Resource Registry Service
|
||||
|
||||
## [v4.4.0-SNAPSHOT]
|
||||
|
||||
- Added query parameters to paginate result of queries [#24648]
|
||||
- Completely refactored JSON Query management [#24163]
|
||||
|
||||
|
||||
## [v4.3.0]
|
||||
|
||||
- Migrated code to reorganized E/R format [#24992]
|
||||
|
|
16
pom.xml
16
pom.xml
|
@ -10,7 +10,7 @@
|
|||
|
||||
<groupId>org.gcube.information-system</groupId>
|
||||
<artifactId>resource-registry</artifactId>
|
||||
<version>4.3.0</version>
|
||||
<version>4.4.0-SNAPSHOT</version>
|
||||
<name>Resource Registry Service</name>
|
||||
<description>The Resource Registry is a web-service which represent the core component of the gCube Information System</description>
|
||||
<packaging>war</packaging>
|
||||
|
@ -101,6 +101,20 @@
|
|||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<!-- END Jersey -->
|
||||
|
||||
<!-- Added to support Java 11 JDK -->
|
||||
<dependency>
|
||||
<groupId>javax.xml.ws</groupId>
|
||||
<artifactId>jaxws-api</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<!-- END Added to support Java 11 JDK -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
|
|
|
@ -6,6 +6,8 @@ import java.util.Calendar;
|
|||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.core.JsonProcessingException;
|
||||
|
@ -35,12 +37,12 @@ public class ServerContextCache extends ContextCache {
|
|||
|
||||
private static Logger logger = LoggerFactory.getLogger(ServerContextCache.class);
|
||||
|
||||
protected List<Context> contextsNoMeta;
|
||||
protected Map<UUID, Context> uuidToContextNoMeta;
|
||||
|
||||
protected List<Context> contextsMetaPrivacy;
|
||||
protected Map<UUID, Context> uuidToContextMetaPrivacy;
|
||||
|
||||
protected List<Context> contextsNoMeta;
|
||||
protected Map<UUID, Context> uuidToContextNoMeta;
|
||||
|
||||
protected boolean includeMeta;
|
||||
|
||||
protected static ServerContextCache singleton;
|
||||
|
@ -60,15 +62,15 @@ public class ServerContextCache extends ContextCache {
|
|||
@Override
|
||||
protected void cleanCache(Calendar now) {
|
||||
super.cleanCache(now);
|
||||
contextsNoMeta = null;
|
||||
uuidToContextNoMeta = new LinkedHashMap<>();
|
||||
contextsMetaPrivacy = null;
|
||||
uuidToContextMetaPrivacy = new LinkedHashMap<>();
|
||||
uuidToContextMetaPrivacy = null;
|
||||
contextsNoMeta = null;
|
||||
uuidToContextNoMeta = null;
|
||||
}
|
||||
|
||||
public ServerContextCache() {
|
||||
super();
|
||||
cleanCache();
|
||||
Calendar now = Calendar.getInstance();
|
||||
cleanCache(now);
|
||||
initContextCacheRenewal();
|
||||
}
|
||||
|
||||
|
@ -85,7 +87,9 @@ public class ServerContextCache extends ContextCache {
|
|||
@Override
|
||||
public List<Context> renew() throws ResourceRegistryException {
|
||||
ContextManagement contextManagement = new ContextManagement();
|
||||
String contextsJsonString = contextManagement.allFromServer(false);
|
||||
contextManagement.setForceOffset(0);
|
||||
contextManagement.setForceLimit(-1);
|
||||
String contextsJsonString = contextManagement.allFromDatabase(false);
|
||||
List<Context> contexts = null;
|
||||
try {
|
||||
contexts = ElementMapper.unmarshalList(contextsJsonString);
|
||||
|
@ -144,10 +148,16 @@ public class ServerContextCache extends ContextCache {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void setContexts(List<Context> contexts) {
|
||||
protected void setContexts(Calendar calendar, List<Context> contexts) {
|
||||
this.contexts = new ArrayList<>();
|
||||
this.contextsNoMeta = new ArrayList<>();
|
||||
this.uuidToContext = new LinkedHashMap<>();
|
||||
|
||||
this.contextsMetaPrivacy = new ArrayList<>();
|
||||
this.uuidToContextMetaPrivacy = new LinkedHashMap<>();
|
||||
|
||||
this.contextsNoMeta = new ArrayList<>();
|
||||
this.uuidToContextNoMeta = new LinkedHashMap<>();
|
||||
|
||||
|
||||
ObjectMapper objectMapper = ElementMapper.getObjectMapper();
|
||||
|
||||
|
@ -221,5 +231,12 @@ public class ServerContextCache extends ContextCache {
|
|||
this.contextFullNameToUUID.put(fullName, uuid);
|
||||
}
|
||||
|
||||
SortedSet<String> contextFullNames = new TreeSet<String>(contextFullNameToUUID.keySet());
|
||||
for(String contextFullName : contextFullNames) {
|
||||
UUID uuid = contextFullNameToUUID.get(contextFullName);
|
||||
Context context = uuidToContext.get(uuid);
|
||||
contextsTree.addNode(context);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
package org.gcube.informationsystem.resourceregistry.contexts.entities;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
@ -31,8 +32,10 @@ import org.gcube.informationsystem.resourceregistry.contexts.relations.IsParentO
|
|||
import org.gcube.informationsystem.resourceregistry.contexts.security.ContextSecurityContext;
|
||||
import org.gcube.informationsystem.resourceregistry.contexts.security.SecurityContext;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.base.entities.EntityElementManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.QueryConditionalOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.QueryLogicalOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.ComparisonOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.LogicalOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.RequestUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.ServerRequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.OrientDBUtility;
|
||||
import org.gcube.informationsystem.serialization.ElementMapper;
|
||||
import org.gcube.informationsystem.types.reference.entities.EntityType;
|
||||
|
@ -56,6 +59,16 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
private static Logger logger = LoggerFactory.getLogger(ContextManagement.class);
|
||||
|
||||
protected String name;
|
||||
protected Integer forceOffset;
|
||||
protected Integer forceLimit;
|
||||
|
||||
public void setForceOffset(Integer forceOffset) {
|
||||
this.forceOffset = forceOffset;
|
||||
}
|
||||
|
||||
public void setForceLimit(Integer forceLimit) {
|
||||
this.forceLimit = forceLimit;
|
||||
}
|
||||
|
||||
private void init() {
|
||||
this.ignoreStartWithKeys.add(Context.PARENT_PROPERTY);
|
||||
|
@ -63,6 +76,8 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
this.typeName = Context.NAME;
|
||||
this.forceIncludeMeta = true;
|
||||
this.forceIncludeAllMeta = true;
|
||||
this.forceOffset = null;
|
||||
this.forceLimit = null;
|
||||
}
|
||||
|
||||
public ContextManagement() {
|
||||
|
@ -127,13 +142,13 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
select.append(parentId);
|
||||
select.append(" MAXDEPTH 1) WHERE ");
|
||||
select.append(Context.NAME_PROPERTY);
|
||||
select.append(QueryConditionalOperator.EQ.getConditionalOperator());
|
||||
select.append(ComparisonOperator.EQ.getDbOperator());
|
||||
select.append("\"");
|
||||
select.append(getName());
|
||||
select.append("\"");
|
||||
select.append(QueryLogicalOperator.AND.getLogicalOperator());
|
||||
select.append(LogicalOperator.AND.getDbOperator());
|
||||
select.append(IdentifiableElement.ID_PROPERTY);
|
||||
select.append(QueryConditionalOperator.NE.getConditionalOperator());
|
||||
select.append(ComparisonOperator.NE.getDbOperator());
|
||||
select.append("\"");
|
||||
select.append(parentContext.uuid);
|
||||
select.append("\"");
|
||||
|
@ -149,11 +164,11 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
select.append(Context.NAME);
|
||||
select.append(" WHERE ");
|
||||
select.append(Context.NAME_PROPERTY);
|
||||
select.append(QueryConditionalOperator.EQ.getConditionalOperator());
|
||||
select.append(ComparisonOperator.EQ.getDbOperator());
|
||||
select.append("\"");
|
||||
select.append(getName());
|
||||
select.append("\"");
|
||||
select.append(QueryLogicalOperator.AND.getLogicalOperator());
|
||||
select.append(LogicalOperator.AND.getDbOperator());
|
||||
select.append("in(\"");
|
||||
select.append(IsParentOf.NAME);
|
||||
select.append("\").size() = 0");
|
||||
|
@ -174,11 +189,18 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
|
||||
}
|
||||
|
||||
private JsonNode filterFieldsByRole(JsonNode context) {
|
||||
// TODO return extra info only to authorized users
|
||||
return context;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected JsonNode createCompleteJsonNode() throws ResourceRegistryException {
|
||||
|
||||
JsonNode context = serializeSelfAsJsonNode();
|
||||
|
||||
context = filterFieldsByRole(context);
|
||||
|
||||
int count = 0;
|
||||
Iterable<OEdge> parents = getElement().getEdges(ODirection.IN);
|
||||
for (OEdge edge : parents) {
|
||||
|
@ -220,6 +242,8 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
return context;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
protected OVertex reallyCreate() throws AlreadyPresentException, ResourceRegistryException {
|
||||
SecurityContext securityContext = null;
|
||||
|
@ -241,6 +265,7 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
uuid = UUIDManager.getInstance().generateValidUUID();
|
||||
}
|
||||
|
||||
logFullPath();
|
||||
createVertex();
|
||||
|
||||
IsParentOfManagement isParentOfManagement = new IsParentOfManagement(oDatabaseDocument);
|
||||
|
@ -252,6 +277,7 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
|
||||
} else {
|
||||
checkContext(null);
|
||||
logFullPath();
|
||||
createVertex();
|
||||
}
|
||||
|
||||
|
@ -275,6 +301,23 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO
|
||||
* The full path of the context is added for every non safe action
|
||||
* - At creation time
|
||||
* - At rename time (see #25139)
|
||||
* - At parent change time (see #26544)
|
||||
*
|
||||
* In this way we are sure to track context fullpath changes.
|
||||
* Furthermore when a context is delete and is mode to the cemetery
|
||||
* i.e. ShadowContextSecurityContext (see #19428) we do not have to do nothing with
|
||||
* the fullpath just move the vertex in the new SecurityContext
|
||||
* and add the instance of context deletion
|
||||
*/
|
||||
protected void logFullPath() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected OVertex reallyUpdate() throws NotFoundException, ResourceRegistryException {
|
||||
|
||||
|
@ -346,6 +389,10 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
move(newParentContextManagement, false);
|
||||
}
|
||||
|
||||
if (parentChanged || nameChanged) {
|
||||
logFullPath();
|
||||
}
|
||||
|
||||
element = (OVertex) updateProperties(oClass, getElement(), jsonNode, ignoreKeys, ignoreStartWithKeys);
|
||||
|
||||
ServerContextCache.getInstance().cleanCache();
|
||||
|
@ -391,6 +438,7 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
thisSecurityContext.changeParentSecurityContext(newParentSecurityContext, oDatabaseDocument);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected void reallyDelete() throws NotFoundException, ResourceRegistryException {
|
||||
Iterable<OEdge> iterable = getElement().getEdges(ODirection.OUT);
|
||||
|
@ -399,6 +447,7 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
throw new ContextException("Cannot remove a " + Context.NAME + " having children");
|
||||
}
|
||||
|
||||
// TODO Move the vertex to the ShadowContextSecurityContext (i.e the cemetery)
|
||||
element.delete();
|
||||
|
||||
ContextUtility contextUtility = ContextUtility.getInstance();
|
||||
|
@ -412,8 +461,35 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
public String reallyGetAll(boolean polymorphic) throws ResourceRegistryException {
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
ArrayNode arrayNode = objectMapper.createArrayNode();
|
||||
|
||||
ServerRequestInfo requestInfo = RequestUtility.getRequestInfo().get();
|
||||
Integer limit = requestInfo.getLimit();
|
||||
if(forceLimit!=null) {
|
||||
limit = forceLimit;
|
||||
}
|
||||
|
||||
if(limit == null) {
|
||||
limit = -1;
|
||||
}
|
||||
|
||||
Integer offset = requestInfo.getOffset();
|
||||
if(forceOffset!=null) {
|
||||
offset = forceOffset;
|
||||
}
|
||||
|
||||
if(offset == null) {
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
int position = -1;
|
||||
int count = 0;
|
||||
|
||||
Iterable<ODocument> iterable = oDatabaseDocument.browseClass(typeName, polymorphic);
|
||||
for (ODocument vertex : iterable) {
|
||||
if(++position < offset) {
|
||||
continue;
|
||||
}
|
||||
|
||||
ContextManagement contextManagement = new ContextManagement();
|
||||
contextManagement.setForceIncludeMeta(forceIncludeMeta);
|
||||
contextManagement.setForceIncludeAllMeta(forceIncludeAllMeta);
|
||||
|
@ -421,6 +497,9 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
try {
|
||||
JsonNode jsonObject = contextManagement.serializeAsJsonNode();
|
||||
arrayNode.add(jsonObject);
|
||||
if(limit > 0 && ++count >= limit) {
|
||||
break;
|
||||
}
|
||||
} catch (ResourceRegistryException e) {
|
||||
logger.error("Unable to correctly serialize {}. It will be excluded from results. {}",
|
||||
vertex.toString(), OrientDBUtility.SHOULD_NOT_OCCUR_ERROR_MESSAGE);
|
||||
|
@ -433,7 +512,7 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
}
|
||||
}
|
||||
|
||||
public String allFromServer(boolean polymorphic) throws ResourceRegistryException {
|
||||
public String allFromDatabase(boolean polymorphic) throws ResourceRegistryException {
|
||||
return super.all(polymorphic);
|
||||
}
|
||||
|
||||
|
@ -442,9 +521,45 @@ public class ContextManagement extends EntityElementManagement<Context, EntityTy
|
|||
try {
|
||||
ServerContextCache contextCache = ServerContextCache.getInstance();
|
||||
List<Context> contexts = contextCache.getContexts();
|
||||
return ElementMapper.marshal(contexts);
|
||||
|
||||
ServerRequestInfo requestInfo = RequestUtility.getRequestInfo().get();
|
||||
Integer limit = requestInfo.getLimit();
|
||||
if(forceLimit!=null) {
|
||||
limit = forceLimit;
|
||||
}else if(limit == null) {
|
||||
limit = -1;
|
||||
}
|
||||
|
||||
Integer offset = requestInfo.getOffset();
|
||||
if(forceOffset!=null) {
|
||||
offset = forceOffset;
|
||||
}else if(offset == null) {
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
int position = -1;
|
||||
int count = 0;
|
||||
|
||||
if(offset==0 && limit<=0) {
|
||||
return ElementMapper.marshal(contexts);
|
||||
}
|
||||
|
||||
List<Context> requestedContexts = new ArrayList<>();
|
||||
for (Context c : contexts) {
|
||||
if(++position < offset) {
|
||||
continue;
|
||||
}
|
||||
|
||||
requestedContexts.add(c);
|
||||
|
||||
if(limit > 0 && ++count >= limit) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return ElementMapper.marshal(requestedContexts);
|
||||
} catch (JsonProcessingException | ResourceRegistryException e) {
|
||||
return allFromServer(polymorphic);
|
||||
return allFromDatabase(polymorphic);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -12,6 +12,7 @@ import com.orientechnologies.orient.core.metadata.security.ORule;
|
|||
|
||||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
* Added for feature #19428
|
||||
*/
|
||||
public class ShadowContextSecurityContext extends SecurityContext {
|
||||
|
||||
|
|
|
@ -209,7 +209,7 @@ public class DatabaseEnvironment {
|
|||
TypeManagement typeManagement = new TypeManagement();
|
||||
typeManagement.setTypeAndTypeName(clz);
|
||||
if(clz.equals(Property.class) || clz.equals(Metadata.class) ) {
|
||||
((TypeManagement) typeManagement).setSkipTypeDefinitionCreation(true);
|
||||
typeManagement.setSkipTypeDefinitionCreation(true);
|
||||
}
|
||||
|
||||
try {
|
||||
|
@ -224,7 +224,7 @@ public class DatabaseEnvironment {
|
|||
}
|
||||
|
||||
/*
|
||||
* We have already created Property and Metadata
|
||||
* We have already created Property and Metadata
|
||||
* because Metadata is needed to create
|
||||
* types for internal use (i.e. Context, EntityType).
|
||||
*
|
||||
|
|
|
@ -31,7 +31,6 @@ import org.gcube.informationsystem.base.reference.IdentifiableElement;
|
|||
import org.gcube.informationsystem.model.reference.ERElement;
|
||||
import org.gcube.informationsystem.model.reference.ModelElement;
|
||||
import org.gcube.informationsystem.model.reference.properties.Metadata;
|
||||
import org.gcube.informationsystem.model.reference.properties.Property;
|
||||
import org.gcube.informationsystem.model.reference.relations.Relation;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.AlreadyPresentException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.AvailableInAnotherContextException;
|
||||
|
@ -83,6 +82,7 @@ public abstract class ElementManagement<El extends OElement, T extends Type> {
|
|||
|
||||
public final static String AT = "@";
|
||||
public final static String UNDERSCORE = "_";
|
||||
public final static String DOLLAR = "$";
|
||||
|
||||
protected final Set<String> ignoreKeys;
|
||||
protected final Set<String> ignoreStartWithKeys;
|
||||
|
@ -148,8 +148,8 @@ public abstract class ElementManagement<El extends OElement, T extends Type> {
|
|||
protected Operation operation;
|
||||
|
||||
/**
|
||||
* A Delete operation has a cascade impact we could want to know the impact
|
||||
* Instances affected by a delete
|
||||
* A Delete an addToContext and a RemoveFromContext operation has a cascade impact
|
||||
* we want to know the impact, i.e. instances involved
|
||||
*/
|
||||
protected final Map<UUID,JsonNode> affectedInstances;
|
||||
|
||||
|
@ -166,6 +166,7 @@ public abstract class ElementManagement<El extends OElement, T extends Type> {
|
|||
this.ignoreStartWithKeys = new HashSet<String>();
|
||||
this.ignoreStartWithKeys.add(ElementManagement.AT);
|
||||
this.ignoreStartWithKeys.add(ElementManagement.UNDERSCORE);
|
||||
this.ignoreStartWithKeys.add(ElementManagement.DOLLAR);
|
||||
|
||||
this.reload = false;
|
||||
|
||||
|
@ -1253,7 +1254,7 @@ public abstract class ElementManagement<El extends OElement, T extends Type> {
|
|||
// TODO check a solution for supertypes
|
||||
TypesCache typesCache = TypesCache.getInstance();
|
||||
@SuppressWarnings("unchecked")
|
||||
CachedType<PropertyType<Property>> metadataType = (CachedType<PropertyType<Property>>) typesCache.getCachedType(Metadata.NAME);
|
||||
CachedType<PropertyType> metadataType = (CachedType<PropertyType>) typesCache.getCachedType(Metadata.NAME);
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
Collection<String> superClasses = metadataType.getSuperTypes();
|
||||
ArrayNode arrayNode = objectMapper.valueToTree(superClasses);
|
||||
|
|
|
@ -12,7 +12,6 @@ import org.gcube.com.fasterxml.jackson.databind.node.ArrayNode;
|
|||
import org.gcube.com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.gcube.common.encryption.encrypter.StringEncrypter;
|
||||
import org.gcube.informationsystem.base.reference.AccessType;
|
||||
import org.gcube.informationsystem.base.reference.properties.PropertyElement;
|
||||
import org.gcube.informationsystem.model.reference.ModelElement;
|
||||
import org.gcube.informationsystem.model.reference.properties.Encrypted;
|
||||
import org.gcube.informationsystem.model.reference.properties.Metadata;
|
||||
|
@ -52,6 +51,7 @@ public class PropertyElementManagement {
|
|||
PROPERTY_IGNORE_START_WITH_KEYS = new HashSet<String>();
|
||||
PROPERTY_IGNORE_START_WITH_KEYS.add(ElementManagement.AT);
|
||||
PROPERTY_IGNORE_START_WITH_KEYS.add(ElementManagement.UNDERSCORE);
|
||||
PROPERTY_IGNORE_START_WITH_KEYS.add(ElementManagement.DOLLAR);
|
||||
|
||||
}
|
||||
|
||||
|
@ -79,7 +79,7 @@ public class PropertyElementManagement {
|
|||
try {
|
||||
TypesCache typesCache = TypesCache.getInstance();
|
||||
@SuppressWarnings("unchecked")
|
||||
CachedType<PropertyType<PropertyElement>> cachedType = (CachedType<PropertyType<PropertyElement>>) typesCache.getCachedType(type);
|
||||
CachedType<PropertyType> cachedType = (CachedType<PropertyType>) typesCache.getCachedType(type);
|
||||
oClass = cachedType.getOClass();
|
||||
AccessType gotAccessType = cachedType.getAccessType();
|
||||
if(!AccessType.PROPERTY_ELEMENT.getClass().isAssignableFrom(gotAccessType.getClass())) {
|
||||
|
@ -156,7 +156,7 @@ public class PropertyElementManagement {
|
|||
|
||||
TypesCache typesCache = TypesCache.getInstance();
|
||||
@SuppressWarnings("unchecked")
|
||||
CachedType<PropertyType<PropertyElement>> cachedType = (CachedType<PropertyType<PropertyElement>>) typesCache.getCachedType(type);
|
||||
CachedType<PropertyType> cachedType = (CachedType<PropertyType>) typesCache.getCachedType(type);
|
||||
OClass oClass = cachedType.getOClass();
|
||||
AccessType gotAccessType = cachedType.getAccessType();
|
||||
if(!AccessType.PROPERTY_ELEMENT.getClass().isAssignableFrom(gotAccessType.getClass())) {
|
||||
|
|
|
@ -1,11 +1,7 @@
|
|||
package org.gcube.informationsystem.resourceregistry.instances.model.entities;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.core.JsonProcessingException;
|
||||
|
@ -26,7 +22,6 @@ import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegis
|
|||
import org.gcube.informationsystem.resourceregistry.api.exceptions.contexts.ContextException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.entities.EntityAlreadyPresentException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.queries.InvalidQueryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaViolationException;
|
||||
import org.gcube.informationsystem.resourceregistry.contexts.ContextUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.contexts.ServerContextCache;
|
||||
|
@ -38,14 +33,14 @@ import org.gcube.informationsystem.resourceregistry.instances.base.entities.Enti
|
|||
import org.gcube.informationsystem.resourceregistry.instances.model.ERManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.model.Operation;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.model.relations.RelationManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.RequestUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.ServerRequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.types.TypesCache;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.MetadataUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.OrientDBUtility;
|
||||
import org.gcube.informationsystem.types.reference.entities.EntityType;
|
||||
|
||||
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
|
||||
import com.orientechnologies.orient.core.id.ORID;
|
||||
import com.orientechnologies.orient.core.metadata.schema.OClass;
|
||||
import com.orientechnologies.orient.core.record.ODirection;
|
||||
import com.orientechnologies.orient.core.record.OEdge;
|
||||
import com.orientechnologies.orient.core.record.OElement;
|
||||
|
@ -154,8 +149,8 @@ public abstract class EntityManagement<E extends Entity, ET extends EntityType>
|
|||
this.relationManagements = new HashMap<>();
|
||||
|
||||
/*
|
||||
* By the default the system honour the propagation constraints
|
||||
* so this variable is initialised as true.
|
||||
* By the default the system honor the propagation constraints
|
||||
* so this variable is initialized as true.
|
||||
*/
|
||||
this.honourPropagationConstraintsInContextSharing = true;
|
||||
|
||||
|
@ -496,211 +491,33 @@ public abstract class EntityManagement<E extends Entity, ET extends EntityType>
|
|||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
ArrayNode arrayNode = objectMapper.createArrayNode();
|
||||
|
||||
ServerRequestInfo requestInfo = RequestUtility.getRequestInfo().get();
|
||||
int limit = requestInfo.getLimit();
|
||||
int offset = requestInfo.getOffset();
|
||||
|
||||
int position = -1;
|
||||
int count = 0;
|
||||
|
||||
Iterable<ODocument> iterable = oDatabaseDocument.browseClass(typeName, polymorphic);
|
||||
for(ODocument vertex : iterable) {
|
||||
if(++position < offset) {
|
||||
continue;
|
||||
}
|
||||
|
||||
EntityManagement<?,?> entityManagement = ElementManagementUtility.getEntityManagement(getWorkingContext(),
|
||||
oDatabaseDocument, (OVertex) vertex);
|
||||
try {
|
||||
entityManagement.setAsEntryPoint();
|
||||
JsonNode jsonNode = entityManagement.serializeAsJsonNode();
|
||||
arrayNode.add(jsonNode);
|
||||
if(limit > 0 && ++count >= limit) {
|
||||
break;
|
||||
}
|
||||
} catch(ResourceRegistryException e) {
|
||||
logger.error("Unable to correctly serialize {}. It will be excluded from results. {}",
|
||||
vertex.toString(), OrientDBUtility.SHOULD_NOT_OCCUR_ERROR_MESSAGE);
|
||||
}
|
||||
}
|
||||
try {
|
||||
return objectMapper.writeValueAsString(arrayNode);
|
||||
} catch(JsonProcessingException e) {
|
||||
throw new ResourceRegistryException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean propertyMatchRequestedValue(OVertex v, String key, String requestedValue, Object instanceValue) throws SchemaException, ResourceRegistryException {
|
||||
return requestedValue.compareTo(instanceValue.toString())==0;
|
||||
|
||||
|
||||
/*
|
||||
OClass oClass = ElementManagement.getOClass(v);
|
||||
OProperty oProperty = oClass.getProperty(key);
|
||||
if(oProperty==null){
|
||||
// It is an additional property
|
||||
return requestedValue.compareTo(instanceValue.toString())==0;
|
||||
}
|
||||
OType oType = oProperty.getType();
|
||||
switch (oType) {
|
||||
case BOOLEAN:
|
||||
Boolean requested = Boolean.valueOf(requestedValue.toLowerCase());
|
||||
return requested == (Boolean) instanceValue;
|
||||
|
||||
case STRING:
|
||||
return requestedValue.compareTo((String) instanceValue)==0;
|
||||
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
public String reallyQuery(String relationType, String referenceType, UUID referenceUUID, ODirection direction,
|
||||
boolean polymorphic, Map<String,String> constraint, boolean includeRelationInResult) throws ResourceRegistryException {
|
||||
*/
|
||||
public String reallyQuery(String relationType, String referenceType, UUID referenceUUID, ODirection direction,
|
||||
boolean polymorphic, Map<String,String> constraint) throws ResourceRegistryException {
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
ArrayNode arrayNode = objectMapper.createArrayNode();
|
||||
|
||||
Iterable<?> references = null;
|
||||
|
||||
if(referenceUUID != null) {
|
||||
OElement element = null;
|
||||
try {
|
||||
element = ElementManagementUtility.getAnyElementByUUID(oDatabaseDocument, referenceUUID);
|
||||
}catch (ResourceRegistryException e) {
|
||||
String error = String.format("No instace with UUID %s exists", referenceUUID.toString());
|
||||
throw new InvalidQueryException(error);
|
||||
}
|
||||
|
||||
if(element instanceof OVertex) {
|
||||
EntityManagement<?, ?> entityManagement = ElementManagementUtility.getEntityManagement(getWorkingContext(),
|
||||
oDatabaseDocument, (OVertex) element);
|
||||
|
||||
String elementType = entityManagement.getTypeName();
|
||||
if(elementType.compareTo(referenceType) != 0) {
|
||||
if(polymorphic && getOClass().isSubClassOf(referenceType)) {
|
||||
// OK
|
||||
} else {
|
||||
String error = String.format("Referenced instace with UUID %s is not a %s", referenceUUID, referenceType);
|
||||
throw new InvalidQueryException(error);
|
||||
}
|
||||
}
|
||||
|
||||
List<OVertex> vertexes = new ArrayList<>();
|
||||
vertexes.add((OVertex) element);
|
||||
references = vertexes;
|
||||
|
||||
} else {
|
||||
String error = String.format("Referenced instace with UUID %s is not a %s", referenceUUID, referenceType);
|
||||
throw new InvalidQueryException(error);
|
||||
}
|
||||
|
||||
} else {
|
||||
references = oDatabaseDocument.browseClass(referenceType, polymorphic);
|
||||
}
|
||||
|
||||
Set<ORID> analysed = new HashSet<>();
|
||||
|
||||
for(Object r : references) {
|
||||
OVertex v = (OVertex) r;
|
||||
|
||||
boolean skip = false;
|
||||
// checking if the constraints are satisfied
|
||||
for(String key : constraint.keySet()) {
|
||||
String value = constraint.get(key);
|
||||
Object o = v.getProperty(key);
|
||||
if(value==null) {
|
||||
if(o==null) {
|
||||
//ok
|
||||
}else {
|
||||
skip = true;
|
||||
break;
|
||||
}
|
||||
}else {
|
||||
if(o==null) {
|
||||
// The vertex has not a required property to be tested
|
||||
// or the property is null
|
||||
skip = true;
|
||||
break;
|
||||
}else {
|
||||
skip = !propertyMatchRequestedValue(v, key, value, o);
|
||||
if(skip) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(skip) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
List<ODirection> directions = new ArrayList<>();
|
||||
if(direction==ODirection.BOTH) {
|
||||
directions.add(ODirection.IN);
|
||||
directions.add(ODirection.OUT);
|
||||
}else {
|
||||
directions.add(direction);
|
||||
}
|
||||
|
||||
for(ODirection d : directions) {
|
||||
|
||||
Iterable<OEdge> edges = v.getEdges(d.opposite(), relationType);
|
||||
for(OEdge edge : edges) {
|
||||
OVertex vertex = edge.getVertex(d);
|
||||
|
||||
ORID vertexORID = vertex.getIdentity();
|
||||
|
||||
if(analysed.contains(vertexORID)) {
|
||||
continue;
|
||||
}
|
||||
analysed.add(vertexORID);
|
||||
|
||||
if(v.getIdentity().compareTo(vertexORID) == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
OClass oClass = ElementManagementUtility.getOClass(vertex);
|
||||
|
||||
/*
|
||||
* If the requested type (i.e. elementType)
|
||||
* differs form the resulting type (i.e. oClass.getName())
|
||||
* we need to evaluate if polymorphism is requested and
|
||||
* if the resulting type is a subclass of the requested type
|
||||
*
|
||||
*/
|
||||
if(oClass.getName().compareTo(typeName)!=0) {
|
||||
if(polymorphic && oClass.isSubClassOf(typeName)) {
|
||||
// OK
|
||||
} else {
|
||||
// excluding from results
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
EntityManagement<?,?> entityManagement = ElementManagementUtility.getEntityManagement(getWorkingContext(),
|
||||
oDatabaseDocument, vertex);
|
||||
|
||||
try {
|
||||
if(referenceUUID!=null && entityManagement.getUUID().compareTo(referenceUUID) == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
/*
|
||||
JsonNode jsonNode;
|
||||
if(includeRelationInResult) {
|
||||
RelationManagement<?,?> relationManagement = ElementManagementUtility.getRelationManagement(getWorkingContext(),
|
||||
oDatabaseDocument, edge);
|
||||
jsonNode = relationManagement.serializeAsJsonNode();
|
||||
}else {
|
||||
jsonNode = entityManagement.serializeAsJsonNode();
|
||||
}
|
||||
*/
|
||||
entityManagement.setAsEntryPoint();
|
||||
JsonNode node = entityManagement.serializeAsJsonNode();
|
||||
|
||||
arrayNode.add(node);
|
||||
} catch(ResourceRegistryException e) {
|
||||
logger.error("Unable to correctly serialize {}. It will be excluded from results. {}",
|
||||
vertex.toString(), OrientDBUtility.SHOULD_NOT_OCCUR_ERROR_MESSAGE);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return objectMapper.writeValueAsString(arrayNode);
|
||||
|
@ -709,7 +526,7 @@ public abstract class EntityManagement<E extends Entity, ET extends EntityType>
|
|||
}
|
||||
}
|
||||
|
||||
public String reallyQueryTraversal(String relationType, String referenceType, UUID referenceUUID,
|
||||
public String reallyQuery(String relationType, String referenceType, UUID referenceUUID,
|
||||
ODirection direction, boolean polymorphic, Map<String,String> constraint) throws ResourceRegistryException {
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
ArrayNode arrayNode = objectMapper.createArrayNode();
|
||||
|
@ -718,77 +535,89 @@ public abstract class EntityManagement<E extends Entity, ET extends EntityType>
|
|||
constraint.put(Entity.ID_PROPERTY, referenceUUID.toString());
|
||||
}
|
||||
|
||||
// TODO check types
|
||||
|
||||
/*
|
||||
* SELECT FROM (TRAVERSE inE('isIdentifiedBy'), outV('EService') FROM (SELECT
|
||||
* FROM SoftwareFacet WHERE group='VREManagement' AND name='SmartExecutor'))
|
||||
* SELECT FROM (
|
||||
* TRAVERSE outV('EService'), inE('isIdentifiedBy') FROM (
|
||||
* SELECT FROM SoftwareFacet WHERE group='VREManagement' AND name='SmartExecutor'
|
||||
* )
|
||||
* )
|
||||
*
|
||||
* WHERE type='EService' // Only is not polymorphic
|
||||
* WHERE @class INSTANCEOF 'EService' // if polymorphic is true
|
||||
*
|
||||
* WHERE @class='EService' // if polymorphic is false
|
||||
*/
|
||||
|
||||
StringBuilder selectStringBuilder = new StringBuilder("SELECT FROM (TRAVERSE ");
|
||||
selectStringBuilder.append(direction.name().toLowerCase());
|
||||
selectStringBuilder.append("E('");
|
||||
selectStringBuilder.append(relationType);
|
||||
selectStringBuilder.append("'), ");
|
||||
selectStringBuilder.append(direction.opposite().name().toLowerCase());
|
||||
selectStringBuilder.append("V('");
|
||||
selectStringBuilder.append(typeName);
|
||||
selectStringBuilder.append("') FROM (SELECT FROM ");
|
||||
selectStringBuilder.append(referenceType);
|
||||
StringBuffer selectStringBuffer = new StringBuffer();
|
||||
selectStringBuffer.append("SELECT FROM (TRAVERSE ");
|
||||
selectStringBuffer.append(direction.name().toLowerCase());
|
||||
selectStringBuffer.append("V('");
|
||||
selectStringBuffer.append(typeName);
|
||||
selectStringBuffer.append("')");
|
||||
|
||||
selectStringBuffer.append(", ");
|
||||
|
||||
selectStringBuffer.append(direction.opposite().name().toLowerCase());
|
||||
selectStringBuffer.append("E('");
|
||||
selectStringBuffer.append(relationType);
|
||||
selectStringBuffer.append("')");
|
||||
|
||||
selectStringBuffer.append(" FROM (SELECT FROM ");
|
||||
selectStringBuffer.append(referenceType);
|
||||
boolean first = true;
|
||||
for(String key : constraint.keySet()) {
|
||||
if(first) {
|
||||
selectStringBuilder.append(" WHERE ");
|
||||
selectStringBuffer.append(" WHERE ");
|
||||
first = false;
|
||||
} else {
|
||||
selectStringBuilder.append(" AND ");
|
||||
selectStringBuffer.append(" AND ");
|
||||
}
|
||||
selectStringBuilder.append(key);
|
||||
selectStringBuilder.append("=");
|
||||
selectStringBuffer.append(key);
|
||||
selectStringBuffer.append("=");
|
||||
String value = constraint.get(key).trim();
|
||||
selectStringBuilder.append("'");
|
||||
selectStringBuilder.append(value);
|
||||
selectStringBuilder.append("'");
|
||||
selectStringBuffer.append("'");
|
||||
selectStringBuffer.append(value);
|
||||
selectStringBuffer.append("'");
|
||||
}
|
||||
selectStringBuilder.append(" ))");
|
||||
selectStringBuffer.append(" ))");
|
||||
|
||||
if(!polymorphic) {
|
||||
selectStringBuilder.append(" WHERE type='");
|
||||
selectStringBuilder.append(typeName);
|
||||
selectStringBuilder.append("'");
|
||||
|
||||
selectStringBuffer.append(" WHERE @class");
|
||||
if(polymorphic) {
|
||||
selectStringBuffer.append(" INSTANCEOF '");
|
||||
} else {
|
||||
selectStringBuffer.append("='");
|
||||
}
|
||||
selectStringBuffer.append(typeName);
|
||||
selectStringBuffer.append("'");
|
||||
|
||||
|
||||
ServerRequestInfo requestInfo = RequestUtility.getRequestInfo().get();
|
||||
Integer limit = requestInfo.getLimit();
|
||||
if(limit==null) {
|
||||
limit = -1;
|
||||
}
|
||||
Integer offset = requestInfo.getOffset();
|
||||
if(offset == null) {
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
String select = selectStringBuilder.toString();
|
||||
selectStringBuffer.append(" SKIP :offset");
|
||||
selectStringBuffer.append(" LIMIT :limit");
|
||||
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
map.put("offset", offset);
|
||||
map.put("limit", limit);
|
||||
|
||||
|
||||
String select = selectStringBuffer.toString();
|
||||
logger.trace(select);
|
||||
|
||||
OResultSet resultSet = oDatabaseDocument.command(select,new HashMap<>());
|
||||
OResultSet resultSet = oDatabaseDocument.command(select, map);
|
||||
|
||||
while(resultSet.hasNext()) {
|
||||
OResult oResult = resultSet.next();
|
||||
OElement element = ElementManagementUtility.getElementFromOptional(oResult.getElement());
|
||||
|
||||
if(polymorphic) {
|
||||
OClass oClass = null;
|
||||
try {
|
||||
if(element instanceof OEdge) {
|
||||
continue;
|
||||
}
|
||||
oClass = ElementManagementUtility.getOClass(element);
|
||||
} catch(Exception e) {
|
||||
String error = String.format("Unable to detect type of %s. %s", element.toString(),
|
||||
OrientDBUtility.SHOULD_NOT_OCCUR_ERROR_MESSAGE);
|
||||
logger.error(error, e);
|
||||
throw new ResourceRegistryException(error);
|
||||
}
|
||||
|
||||
if(oClass.isSubClassOf(typeName)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
OVertex vertex = (OVertex) element;
|
||||
|
||||
EntityManagement<?,?> entityManagement = ElementManagementUtility.getEntityManagement(getWorkingContext(),
|
||||
|
@ -816,10 +645,6 @@ public abstract class EntityManagement<E extends Entity, ET extends EntityType>
|
|||
}
|
||||
}
|
||||
|
||||
/*
|
||||
public String query(String relationType, String referenceType, UUID referenceUUID, ODirection direction,
|
||||
boolean polymorphic, Map<String,String> constraint, boolean includeRelationInResult) throws ResourceRegistryException {
|
||||
*/
|
||||
public String query(String relationType, String referenceType, UUID referenceUUID, ODirection direction,
|
||||
boolean polymorphic, Map<String,String> constraint) throws ResourceRegistryException {
|
||||
|
||||
|
|
|
@ -39,6 +39,8 @@ import org.gcube.informationsystem.resourceregistry.instances.model.Operation;
|
|||
import org.gcube.informationsystem.resourceregistry.instances.model.entities.EntityManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.model.entities.FacetManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.model.entities.ResourceManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.RequestUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.ServerRequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.types.TypesCache;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.MetadataUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.OrientDBUtility;
|
||||
|
@ -777,11 +779,23 @@ public abstract class RelationManagement<T extends EntityManagement<? extends En
|
|||
|
||||
}
|
||||
|
||||
protected Collection<JsonNode> serializeEdges(Iterable<ODocument> edges, boolean postFilterPolymorphic)
|
||||
private Collection<JsonNode> serializeEdges(Iterable<ODocument> edges, boolean postFilterPolymorphic)
|
||||
throws ResourceRegistryException {
|
||||
|
||||
ServerRequestInfo requestInfo = RequestUtility.getRequestInfo().get();
|
||||
int limit = requestInfo.getLimit();
|
||||
int offset = requestInfo.getOffset();
|
||||
|
||||
int position = -1;
|
||||
int count = 0;
|
||||
|
||||
// Map<String,JsonNode> visitedSourceResources = new HashMap<>();
|
||||
List<JsonNode> serilizedEdges = new ArrayList<>();
|
||||
for(ODocument d : edges) {
|
||||
if(++position < offset) {
|
||||
continue;
|
||||
}
|
||||
|
||||
OEdge edge = (OEdge) d;
|
||||
|
||||
if(postFilterPolymorphic && getOClass().isSubClassOf(typeName)) {
|
||||
|
@ -792,6 +806,9 @@ public abstract class RelationManagement<T extends EntityManagement<? extends En
|
|||
oDatabaseDocument, edge);
|
||||
// visitedSourceResources = relationManagement.fullSerialize(visitedSourceResources);
|
||||
serilizedEdges.add(relationManagement.serializeAsJsonNode());
|
||||
if(limit > 0 && ++count >= limit) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return serilizedEdges;
|
||||
}
|
||||
|
|
|
@ -37,7 +37,7 @@ public class QueryImpl implements Query {
|
|||
oDatabaseDocument = securityContext.getDatabaseDocument(PermissionMode.READER);
|
||||
oDatabaseDocument.begin();
|
||||
|
||||
logger.debug("Going to execute query '{} limit {}'", query);
|
||||
logger.debug("Going to execute query '{}'", query);
|
||||
|
||||
OResultSet resultSet = oDatabaseDocument.query(query);
|
||||
|
||||
|
|
|
@ -2,11 +2,14 @@ package org.gcube.informationsystem.resourceregistry.queries.json;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.gcube.com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.gcube.informationsystem.base.reference.AccessType;
|
||||
import org.gcube.informationsystem.base.reference.Direction;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegistryException;
|
||||
|
@ -23,7 +26,8 @@ import org.gcube.informationsystem.resourceregistry.queries.json.base.entities.J
|
|||
import org.gcube.informationsystem.resourceregistry.queries.json.base.entities.JsonQueryResource;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.relations.JsonQueryConsistsOf;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.relations.JsonQueryIsRelatedTo;
|
||||
import org.gcube.informationsystem.resourceregistry.types.CachedType;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.RequestUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.ServerRequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.types.TypesCache;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.OrientDBUtility;
|
||||
import org.gcube.informationsystem.utils.TypeUtility;
|
||||
|
@ -42,8 +46,6 @@ public class JsonQuery {
|
|||
|
||||
private static Logger logger = LoggerFactory.getLogger(JsonQuery.class);
|
||||
|
||||
private static final Integer UNBOUNDED_LIMIT = -1;
|
||||
|
||||
protected ObjectMapper objectMapper;
|
||||
protected JsonNode jsonQuery;
|
||||
protected JsonQueryERElement entryPoint;
|
||||
|
@ -98,11 +100,17 @@ public class JsonQuery {
|
|||
return jsonQueryERElement;
|
||||
}
|
||||
|
||||
|
||||
public StringBuffer createQuery() throws SchemaException, InvalidQueryException, ResourceRegistryException {
|
||||
entryPoint = getJsonQueryERElement(jsonQuery);
|
||||
entryPoint.setEntryPoint(true);
|
||||
return entryPoint.analize(new StringBuffer());
|
||||
return entryPoint.createQuery(new StringBuffer());
|
||||
}
|
||||
|
||||
|
||||
public StringBuffer createMatchQuery() throws SchemaException, InvalidQueryException, ResourceRegistryException {
|
||||
entryPoint = getJsonQueryERElement(jsonQuery);
|
||||
entryPoint.setEntryPoint(true);
|
||||
return entryPoint.createMatchQuery(new StringBuffer());
|
||||
}
|
||||
|
||||
public String query() throws InvalidQueryException, ResourceRegistryException {
|
||||
|
@ -114,11 +122,24 @@ public class JsonQuery {
|
|||
oDatabaseDocument = securityContext.getDatabaseDocument(PermissionMode.READER);
|
||||
oDatabaseDocument.begin();
|
||||
|
||||
ServerRequestInfo requestInfo = RequestUtility.getRequestInfo().get();
|
||||
Integer limit = requestInfo.getLimit();
|
||||
if(limit==null) {
|
||||
limit = -1;
|
||||
}
|
||||
Integer offset = requestInfo.getOffset();
|
||||
if(offset == null) {
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
StringBuffer stringBuffer = createQuery();
|
||||
stringBuffer.append(" limit :limit");
|
||||
stringBuffer.append(" SKIP :offset");
|
||||
stringBuffer.append(" LIMIT :limit");
|
||||
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
map.put("limit", JsonQuery.UNBOUNDED_LIMIT);
|
||||
map.put("offset", offset);
|
||||
map.put("limit", limit);
|
||||
|
||||
|
||||
String query = stringBuffer.toString();
|
||||
logger.trace("Going to execute the following query:\n{} \n from the JSONQuery\n{}", query, objectMapper.writeValueAsString(jsonQuery));
|
||||
|
@ -127,39 +148,52 @@ public class JsonQuery {
|
|||
|
||||
ArrayNode arrayNode = objectMapper.createArrayNode();
|
||||
|
||||
boolean projection = entryPoint.isProjection();
|
||||
boolean first = true;
|
||||
|
||||
Set<String> keys = new HashSet<>();
|
||||
|
||||
while(resultSet.hasNext()) {
|
||||
OResult oResult = resultSet.next();
|
||||
OElement element = ElementManagementUtility.getElementFromOptional(oResult.getElement());
|
||||
|
||||
try {
|
||||
JsonNode jsonNodeResult = null;
|
||||
ElementManagement<?,?> erManagement = ElementManagementUtility.getERManagement(securityContext, oDatabaseDocument,
|
||||
element);
|
||||
if(projection) {
|
||||
if(first) {
|
||||
keys = oResult.getPropertyNames();
|
||||
first = false;
|
||||
}
|
||||
ObjectNode objectNode = objectMapper.createObjectNode();
|
||||
|
||||
// To support polymorphism we do not include ="TypeName" in query. So we need post processing filtering of results
|
||||
for(String key : keys) {
|
||||
Object value = oResult.getProperty(key);
|
||||
|
||||
String requestedType = entryPoint.getType();
|
||||
String gotType = erManagement.getTypeName();
|
||||
if(value == null) {
|
||||
objectNode.put(key, "");
|
||||
}else if(value instanceof String) {
|
||||
objectNode.put(key, (String) value);
|
||||
}else if(value instanceof Integer) {
|
||||
objectNode.put(key, (Integer) value);
|
||||
}else if(value instanceof Long) {
|
||||
objectNode.put(key, (Long) value);
|
||||
}else {
|
||||
objectNode.put(key, value.toString());
|
||||
}
|
||||
|
||||
if(requestedType.compareTo(gotType)==0) {
|
||||
}
|
||||
arrayNode.add(objectNode);
|
||||
}else {
|
||||
OElement element = ElementManagementUtility.getElementFromOptional(oResult.getElement());
|
||||
|
||||
try {
|
||||
JsonNode jsonNodeResult = null;
|
||||
ElementManagement<?,?> erManagement = ElementManagementUtility.getERManagement(securityContext, oDatabaseDocument,
|
||||
element);
|
||||
erManagement.setAsEntryPoint();
|
||||
jsonNodeResult = erManagement.serializeAsJsonNode();
|
||||
arrayNode.add(jsonNodeResult);
|
||||
continue;
|
||||
} catch(ResourceRegistryException e) {
|
||||
logger.error("Unable to correctly serialize {}. It will be excluded from results. {}",
|
||||
element.toString(), OrientDBUtility.SHOULD_NOT_OCCUR_ERROR_MESSAGE);
|
||||
}
|
||||
|
||||
CachedType<?> cachedType = TypesCache.getInstance().getCachedType(gotType);
|
||||
if(cachedType.getSuperTypes().contains(requestedType)) {
|
||||
erManagement.setAsEntryPoint();
|
||||
jsonNodeResult = erManagement.serializeAsJsonNode();
|
||||
arrayNode.add(jsonNodeResult);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
} catch(ResourceRegistryException e) {
|
||||
logger.error("Unable to correctly serialize {}. It will be excluded from results. {}",
|
||||
element.toString(), OrientDBUtility.SHOULD_NOT_OCCUR_ERROR_MESSAGE);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,9 +1,13 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.json.base;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.ws.rs.InternalServerErrorException;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.gcube.com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
|
@ -16,14 +20,18 @@ import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegis
|
|||
import org.gcube.informationsystem.resourceregistry.api.exceptions.queries.InvalidQueryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaNotFoundException;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.QueryConditionalOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.QueryLogicalOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.ComparisonOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.LogicalOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.MatemathicsOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.operators.ProjectionOperator;
|
||||
import org.gcube.informationsystem.resourceregistry.types.TypesCache;
|
||||
import org.gcube.informationsystem.utils.TypeUtility;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public abstract class JsonQueryERElement {
|
||||
|
||||
// private Logger logger = LoggerFactory.getLogger(this.getClass());
|
||||
protected Logger logger = LoggerFactory.getLogger(this.getClass());
|
||||
|
||||
public static void validateType(String type, AccessType requiredAccessType) throws SchemaException, ResourceRegistryException {
|
||||
AccessType accessType = TypesCache.getInstance().getCachedType(type).getAccessType();
|
||||
|
@ -42,12 +50,69 @@ public abstract class JsonQueryERElement {
|
|||
protected Direction direction;
|
||||
protected boolean entryPoint;
|
||||
|
||||
/* Start of variables used to create in MATCH queries */
|
||||
/**
|
||||
* Instruct the JSON query analyzer if it is a projection
|
||||
*/
|
||||
protected boolean projection;
|
||||
|
||||
/**
|
||||
* The chain of callers of this instance analyzer.
|
||||
* breadcrumb.get(breadcrumb.size-1) == caller
|
||||
* breadcrumb is empty if this instance is an entry point
|
||||
*/
|
||||
protected List<JsonQueryERElement> breadcrumb;
|
||||
/**
|
||||
* The breadcrumb.size() provide the level of nesting.
|
||||
* The position which element number at the same level.
|
||||
* E.g. the index of a ConsistsOf when the caller is a Resource.
|
||||
*
|
||||
* It is used in conjunction with breadcrumb.size() to attach a number to the alias.
|
||||
* This allows to generate a predictive not clashing alias instead of using random string/number.
|
||||
* This is useful for testing purposes.
|
||||
*/
|
||||
protected int position;
|
||||
|
||||
/**
|
||||
* Contains the alias if needed by the class
|
||||
*/
|
||||
protected String alias;
|
||||
|
||||
/**
|
||||
* This field is used by entry point only.
|
||||
* Any class at any level can retrieve the entry point using
|
||||
* breadcrumb.get(0);
|
||||
*/
|
||||
protected List<String> fieldsToEmit;
|
||||
/* Start of variables used to create in MATCH queries */
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* it indicates the number of properties in this.jsonNode
|
||||
* This number is manipulated while analyzing the jsonNode
|
||||
* to properly create the query.
|
||||
*/
|
||||
protected int size;
|
||||
|
||||
protected boolean traverseBack;
|
||||
|
||||
public JsonQueryERElement(JsonNode jsonQuery, AccessType accessType) throws SchemaException, ResourceRegistryException {
|
||||
this.objectMapper = new ObjectMapper();
|
||||
this.type = TypeUtility.getTypeName(jsonQuery);
|
||||
this.jsonNode = jsonQuery;
|
||||
this.size = jsonNode.size();
|
||||
this.accessType = accessType;
|
||||
this.entryPoint = false;
|
||||
this.traverseBack = true;
|
||||
|
||||
this.projection = false;
|
||||
this.breadcrumb = new ArrayList<>();
|
||||
this.position = 0;
|
||||
this.alias = null;
|
||||
this.fieldsToEmit = new ArrayList<>();
|
||||
|
||||
|
||||
this.fieldNamesToRemove = new HashSet<>();
|
||||
this.fieldNamesToRemove.add(Element.TYPE_PROPERTY);
|
||||
|
@ -75,16 +140,93 @@ public abstract class JsonQueryERElement {
|
|||
|
||||
public void setEntryPoint(boolean entryPoint) {
|
||||
this.entryPoint = entryPoint;
|
||||
this.traverseBack = !entryPoint;
|
||||
}
|
||||
|
||||
public abstract StringBuffer analize(StringBuffer stringBuffer) throws SchemaNotFoundException, InvalidQueryException, SchemaException, ResourceRegistryException;
|
||||
public boolean isProjection() {
|
||||
return projection;
|
||||
}
|
||||
|
||||
public void setProjection(boolean projection) {
|
||||
if(!projection) {
|
||||
throw new InternalServerErrorException("Projection can only be set to true from code. This is a server side bug. Please contact the administrator.");
|
||||
}
|
||||
this.projection = projection;
|
||||
if(!entryPoint) {
|
||||
// Set the projection in the parent
|
||||
breadcrumb.get(breadcrumb.size()-2).setProjection(projection);
|
||||
}
|
||||
}
|
||||
|
||||
protected StringBuffer addConstraints(JsonNode jsonNode, QueryLogicalOperator queryLogicalOperator, String fieldNamePrefix) throws InvalidQueryException {
|
||||
public List<JsonQueryERElement> getBreadcrumb() {
|
||||
return breadcrumb;
|
||||
}
|
||||
|
||||
public void setBreadcrumb(List<JsonQueryERElement> breadcrumb) {
|
||||
this.breadcrumb = breadcrumb;
|
||||
}
|
||||
|
||||
public int getPosition() {
|
||||
return position;
|
||||
}
|
||||
|
||||
public void setPosition(int position) {
|
||||
this.position = position;
|
||||
}
|
||||
|
||||
public String getAlias() {
|
||||
return alias;
|
||||
}
|
||||
|
||||
public String getAlias(boolean generateifNull) {
|
||||
if(alias==null && generateifNull) {
|
||||
alias = generateAlias().toString();
|
||||
}
|
||||
return alias;
|
||||
}
|
||||
|
||||
protected StringBuffer generateAlias() {
|
||||
StringBuffer sb = new StringBuffer();
|
||||
sb.append(type.toLowerCase());
|
||||
for(JsonQueryERElement elem : breadcrumb) {
|
||||
sb.append(elem.getPosition());
|
||||
}
|
||||
sb.append(this.position);
|
||||
return sb;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a field to emit only if this instance is the entry point
|
||||
* @param fieldToEmit
|
||||
*/
|
||||
protected void addFieldToEmit(String fieldToEmit) {
|
||||
fieldsToEmit.add(fieldToEmit);
|
||||
logger.trace("The field to emit ({}) has been added to {} with alias {}", fieldToEmit, this.type, this.alias);
|
||||
if(!entryPoint) {
|
||||
logger.trace("The field to emit ({}) will be added to the parent too", fieldToEmit);
|
||||
breadcrumb.get(breadcrumb.size()-2).addFieldToEmit(fieldToEmit);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isTraverseBack() {
|
||||
return traverseBack;
|
||||
}
|
||||
|
||||
public void setTraverseBack(boolean traverseBack) {
|
||||
this.traverseBack = traverseBack;
|
||||
}
|
||||
|
||||
public StringBuffer createQuery(StringBuffer stringBuffer) throws SchemaNotFoundException, InvalidQueryException, SchemaException, ResourceRegistryException {
|
||||
return createMatchQuery(stringBuffer);
|
||||
}
|
||||
|
||||
public abstract StringBuffer createTraversalQuery(StringBuffer stringBuffer) throws SchemaNotFoundException, InvalidQueryException, SchemaException, ResourceRegistryException;
|
||||
|
||||
protected StringBuffer addConstraints(JsonNode jsonNode, LogicalOperator queryLogicalOperator, String fieldNamePrefix) throws InvalidQueryException {
|
||||
StringBuffer stringBuffer = new StringBuffer();
|
||||
|
||||
if(queryLogicalOperator==null) {
|
||||
queryLogicalOperator = QueryLogicalOperator.AND;
|
||||
queryLogicalOperator = LogicalOperator.AND;
|
||||
}
|
||||
|
||||
JsonNode copiedJsonNode = jsonNode.deepCopy();
|
||||
|
@ -99,13 +241,17 @@ public abstract class JsonQueryERElement {
|
|||
|
||||
while(iterator.hasNext()) {
|
||||
String fieldName = iterator.next();
|
||||
if(first) {
|
||||
first = false;
|
||||
}else {
|
||||
stringBuffer.append(queryLogicalOperator.getLogicalOperator());
|
||||
}
|
||||
JsonNode node = objectNode.get(fieldName);
|
||||
stringBuffer.append(evaluateNode(node, fieldName, fieldNamePrefix));
|
||||
StringBuffer evBuffer = evaluateNode(node, fieldName, fieldNamePrefix);
|
||||
|
||||
if(evBuffer!=null && evBuffer.length()>0) {
|
||||
if(first) {
|
||||
first = false;
|
||||
}else {
|
||||
stringBuffer.append(queryLogicalOperator.getDbOperator());
|
||||
}
|
||||
stringBuffer.append(evBuffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -114,13 +260,19 @@ public abstract class JsonQueryERElement {
|
|||
Iterator<JsonNode> iterator = arrayNode.iterator();
|
||||
boolean first = true;
|
||||
while(iterator.hasNext()) {
|
||||
if(first) {
|
||||
first = false;
|
||||
}else {
|
||||
stringBuffer.append(queryLogicalOperator.getLogicalOperator());
|
||||
}
|
||||
JsonNode node = iterator.next();
|
||||
stringBuffer.append(evaluateNode(node, null, fieldNamePrefix));
|
||||
StringBuffer evBuffer = evaluateNode(node, null, fieldNamePrefix);
|
||||
|
||||
if(!first) {
|
||||
stringBuffer.append(queryLogicalOperator.getDbOperator());
|
||||
}
|
||||
|
||||
if(evBuffer!=null && evBuffer.length()>0) {
|
||||
if(first) {
|
||||
first = false;
|
||||
}
|
||||
stringBuffer.append(evBuffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -128,26 +280,56 @@ public abstract class JsonQueryERElement {
|
|||
}
|
||||
|
||||
protected StringBuffer evaluateNode(JsonNode jsonNode, String fieldName, String fieldNamePrefix) throws InvalidQueryException {
|
||||
|
||||
if(ProjectionOperator.getOperators().contains(fieldName)) {
|
||||
--size;
|
||||
setProjection(true);
|
||||
Iterator<String> iterator = jsonNode.fieldNames();
|
||||
while(iterator.hasNext()) {
|
||||
String fieldNameToEmit = iterator.next();
|
||||
String nameOfFieldToEmit = jsonNode.get(fieldNameToEmit).asText();
|
||||
StringBuffer b = new StringBuffer();
|
||||
b.append(getAlias(true));
|
||||
b.append(".");
|
||||
if(fieldNamePrefix !=null) {
|
||||
b.append(fieldNamePrefix);
|
||||
b.append(".");
|
||||
}
|
||||
b.append(fieldNameToEmit);
|
||||
b.append(" AS `");
|
||||
b.append(nameOfFieldToEmit);
|
||||
b.append("`");
|
||||
addFieldToEmit(b.toString());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
if(MatemathicsOperator.getOperators().contains(fieldName)) {
|
||||
--size;
|
||||
setProjection(true);
|
||||
MatemathicsOperator mo = MatemathicsOperator.getOperator(fieldName);
|
||||
String fieldToEmit = mo.generateFieldToEmit(jsonNode, getAlias(true));
|
||||
addFieldToEmit(fieldToEmit);
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
StringBuffer stringBuffer = new StringBuffer();
|
||||
|
||||
if(QueryLogicalOperator.getOperators().contains(fieldName)) {
|
||||
QueryLogicalOperator queryLogicalOperator = QueryLogicalOperator.getQueryLogicalOperator(fieldName);
|
||||
if(LogicalOperator.getOperators().contains(fieldName)) {
|
||||
LogicalOperator queryLogicalOperator = LogicalOperator.getOperator(fieldName);
|
||||
stringBuffer.append("(");
|
||||
stringBuffer.append(addConstraints(jsonNode, queryLogicalOperator, fieldNamePrefix));
|
||||
stringBuffer.append(")");
|
||||
return stringBuffer;
|
||||
}
|
||||
|
||||
if(QueryConditionalOperator.getOperators().contains(fieldName)) {
|
||||
QueryConditionalOperator queryConditionalOperator = QueryConditionalOperator.getQueryComparisonOperator(fieldName);
|
||||
if(ComparisonOperator.getOperators().contains(fieldName)) {
|
||||
ComparisonOperator comparisonOperator = ComparisonOperator.getOperator(fieldName);
|
||||
|
||||
if(queryConditionalOperator == QueryConditionalOperator.IN) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
StringBuffer key = getKey(null, fieldNamePrefix);
|
||||
StringBuffer value = getValue(jsonNode);
|
||||
stringBuffer.append(addCondition(queryConditionalOperator, key, value));
|
||||
String key = getKey(null, fieldNamePrefix);
|
||||
String value = getValue(jsonNode);
|
||||
stringBuffer.append(comparisonOperator.addCondition(key, value));
|
||||
return stringBuffer;
|
||||
}
|
||||
|
||||
|
@ -169,38 +351,34 @@ public abstract class JsonQueryERElement {
|
|||
}
|
||||
|
||||
if(jsonNode.isTextual() || jsonNode.isNumber()) {
|
||||
StringBuffer key = getKey(fieldName, fieldNamePrefix);
|
||||
StringBuffer value = getValue(jsonNode);
|
||||
stringBuffer.append(addCondition(QueryConditionalOperator.EQ, key, value));
|
||||
String key = getKey(fieldName, fieldNamePrefix);
|
||||
String value = getValue(jsonNode);
|
||||
stringBuffer.append(ComparisonOperator.EQ.addCondition(key, value));
|
||||
}
|
||||
|
||||
if(jsonNode.isNull()) {
|
||||
String key = getKey(fieldName, null);
|
||||
stringBuffer.append(ComparisonOperator.IS.addCondition(key, null));
|
||||
}
|
||||
|
||||
return stringBuffer;
|
||||
}
|
||||
|
||||
protected StringBuffer addCondition(QueryConditionalOperator queryConditionalOperator, StringBuffer key, StringBuffer value) {
|
||||
StringBuffer stringBuffer = new StringBuffer();
|
||||
stringBuffer.append(key);
|
||||
stringBuffer.append(queryConditionalOperator.getConditionalOperator());
|
||||
stringBuffer.append(value);
|
||||
return stringBuffer;
|
||||
}
|
||||
|
||||
|
||||
protected StringBuffer getKey(String fieldName, String fieldNamePrefix) {
|
||||
protected String getKey(String fieldName, String fieldNamePrefix) {
|
||||
StringBuffer stringBuffer = new StringBuffer();
|
||||
if(fieldNamePrefix!=null) {
|
||||
stringBuffer.append(fieldNamePrefix);
|
||||
if(fieldName!=null && fieldName.compareTo("")!=0) {
|
||||
stringBuffer.append(fieldNamePrefix.trim());
|
||||
if(fieldName!=null && fieldName.trim().length()!=0) {
|
||||
stringBuffer.append(".");
|
||||
}
|
||||
}
|
||||
if(fieldName!=null) {
|
||||
stringBuffer.append(fieldName);
|
||||
stringBuffer.append(fieldName.trim());
|
||||
}
|
||||
return stringBuffer;
|
||||
return stringBuffer.toString();
|
||||
}
|
||||
|
||||
protected StringBuffer getValue(JsonNode jsonNode) {
|
||||
protected String getValue(JsonNode jsonNode) {
|
||||
StringBuffer stringBuffer = new StringBuffer();
|
||||
|
||||
String value = jsonNode.asText();
|
||||
|
@ -211,6 +389,92 @@ public abstract class JsonQueryERElement {
|
|||
stringBuffer.append(value);
|
||||
stringBuffer.append("\"");
|
||||
}
|
||||
return stringBuffer.toString();
|
||||
}
|
||||
|
||||
protected List<JsonQueryERElement> getChildrenBreadcrumb() {
|
||||
List<JsonQueryERElement> childrenBreadcrumb = new ArrayList<>(this.breadcrumb);
|
||||
childrenBreadcrumb.add(this);
|
||||
return childrenBreadcrumb;
|
||||
}
|
||||
|
||||
protected StringBuffer wrapMatchQuery(StringBuffer buffer) throws InvalidQueryException {
|
||||
if(entryPoint) {
|
||||
alias = getAlias(true);
|
||||
|
||||
StringBuffer sb = null;
|
||||
if(size > 1) {
|
||||
sb = addConstraints(jsonNode, null, null);
|
||||
}
|
||||
|
||||
StringBuffer entryBuffer = new StringBuffer();
|
||||
entryBuffer.append("MATCH\n");
|
||||
entryBuffer.append("\t{class: "); // The { has to be closed
|
||||
entryBuffer.append(type);
|
||||
entryBuffer.append(", as: ");
|
||||
entryBuffer.append(alias);
|
||||
entryBuffer.append(", where: ");
|
||||
if(sb!=null && sb.length()>0) {
|
||||
entryBuffer.append("(");
|
||||
}
|
||||
entryBuffer.append("($currentMatch['@class'] INSTANCEOF '");
|
||||
entryBuffer.append(type);
|
||||
entryBuffer.append("')"); // close the second (
|
||||
|
||||
if(sb!=null && sb.length()>0) {
|
||||
entryBuffer.append(" AND (");
|
||||
entryBuffer.append(sb);
|
||||
entryBuffer.append(")");
|
||||
entryBuffer.append(")");
|
||||
}
|
||||
|
||||
entryBuffer.append("}\n");
|
||||
entryBuffer.append(buffer);
|
||||
|
||||
entryBuffer.append("\nRETURN\n");
|
||||
|
||||
if(!projection) {
|
||||
entryBuffer.append("\tDISTINCT(");
|
||||
entryBuffer.append(alias);
|
||||
entryBuffer.append(") as ret");
|
||||
|
||||
StringBuffer wrap = new StringBuffer();
|
||||
wrap.append("SELECT EXPAND(ret) FROM (\n");
|
||||
wrap.append(entryBuffer);
|
||||
wrap.append("\n)");
|
||||
|
||||
entryBuffer = wrap;
|
||||
}else {
|
||||
int size = fieldsToEmit.size();
|
||||
for(int i=0; i<size; i++) {
|
||||
entryBuffer.append("\t");
|
||||
entryBuffer.append(fieldsToEmit.get(i));
|
||||
if(i<(size-1)) {
|
||||
entryBuffer.append(",\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
return entryBuffer;
|
||||
}else {
|
||||
return buffer;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected abstract StringBuffer getSpecificMatchQuery(List<JsonQueryERElement> childrenBreadcrumb) throws SchemaException, ResourceRegistryException;
|
||||
|
||||
public StringBuffer createMatchQuery(StringBuffer stringBuffer) throws SchemaException, ResourceRegistryException {
|
||||
List<JsonQueryERElement> childrenBreadcrumb = getChildrenBreadcrumb();
|
||||
|
||||
if(entryPoint) {
|
||||
getAlias(true);
|
||||
}
|
||||
|
||||
StringBuffer buffer = getSpecificMatchQuery(childrenBreadcrumb);
|
||||
buffer = wrapMatchQuery(buffer);
|
||||
|
||||
stringBuffer.append(buffer);
|
||||
|
||||
return stringBuffer;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,42 +1,46 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.json.base.entities;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.informationsystem.base.reference.AccessType;
|
||||
import org.gcube.informationsystem.base.reference.Direction;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegistryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.queries.InvalidQueryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaException;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.JsonQueryERElement;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.relations.JsonQueryConsistsOf;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.OrientDBUtility;
|
||||
|
||||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*/
|
||||
public class JsonQueryFacet extends JsonQueryEntity {
|
||||
|
||||
public final static String _IN = "_in";
|
||||
public final static String _SOURCE = "_source";
|
||||
|
||||
public JsonQueryFacet(JsonNode jsonQuery) throws SchemaException, ResourceRegistryException {
|
||||
super(jsonQuery, AccessType.FACET);
|
||||
fieldNamesToRemove.add(JsonQueryFacet._IN);
|
||||
fieldNamesToRemove.add(JsonQueryFacet._SOURCE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public StringBuffer analize(StringBuffer stringBuffer) throws SchemaException, ResourceRegistryException {
|
||||
public StringBuffer createTraversalQuery(StringBuffer stringBuffer) throws SchemaException, ResourceRegistryException {
|
||||
StringBuffer newBuffer = new StringBuffer();
|
||||
|
||||
int size = jsonNode.size();
|
||||
|
||||
boolean entry = entryPoint;
|
||||
if(jsonNode.has(_IN)) {
|
||||
boolean traverseBackLocal = traverseBack;
|
||||
if(jsonNode.has(_SOURCE)) {
|
||||
if(!entryPoint) {
|
||||
throw new InvalidQueryException(_IN + " property cannot be used in a facet if it is not the entry object");
|
||||
throw new InvalidQueryException(_SOURCE + " property cannot be used in a facet if it is not the entry object");
|
||||
}
|
||||
JsonNode consistsOfNode = jsonNode.get(_IN);
|
||||
JsonNode consistsOfNode = jsonNode.get(_SOURCE);
|
||||
JsonQueryConsistsOf jsonQueryConsistsOf = new JsonQueryConsistsOf(consistsOfNode);
|
||||
jsonQueryConsistsOf.setEntryPoint(entryPoint);
|
||||
jsonQueryConsistsOf.setTraverseBack(traverseBackLocal);
|
||||
jsonQueryConsistsOf.setDirection(Direction.OUT);
|
||||
stringBuffer = jsonQueryConsistsOf.analize(stringBuffer);
|
||||
entry = false;
|
||||
stringBuffer = jsonQueryConsistsOf.createTraversalQuery(stringBuffer);
|
||||
traverseBackLocal = true;
|
||||
|
||||
/* Need to substract 1 from size otherwise
|
||||
* it add WHERE at the end because _in
|
||||
|
@ -47,28 +51,134 @@ public class JsonQueryFacet extends JsonQueryEntity {
|
|||
|
||||
newBuffer.append("SELECT FROM ");
|
||||
|
||||
if(!entry) {
|
||||
if(traverseBackLocal) {
|
||||
newBuffer.append("( ");
|
||||
newBuffer.append("TRAVERSE inV(\"");
|
||||
}
|
||||
|
||||
newBuffer.append(type);
|
||||
|
||||
if(!entry) {
|
||||
if(traverseBackLocal) {
|
||||
newBuffer.append("\") FROM ( ");
|
||||
newBuffer.append(stringBuffer);
|
||||
newBuffer.append(")");
|
||||
newBuffer.append(")");
|
||||
}
|
||||
|
||||
/*
|
||||
* If size >1 I have to add constraints.
|
||||
* If is an entry point I have to add the INSTANCEOF to properly support polymorphism
|
||||
*/
|
||||
if(size > 1 || entryPoint) {
|
||||
newBuffer.append(" WHERE ");
|
||||
}
|
||||
|
||||
// Size 1 means that only 'type' property is present
|
||||
if(size > 1) {
|
||||
newBuffer.append(" WHERE ");
|
||||
newBuffer.append(addConstraints(jsonNode, null, null));
|
||||
if(entryPoint) {
|
||||
newBuffer.append(" AND ");
|
||||
}
|
||||
}
|
||||
|
||||
if(entryPoint) {
|
||||
newBuffer.append(OrientDBUtility.ORIENTDB_CLASS_PROPERTY);
|
||||
newBuffer.append(" INSTANCEOF \"");
|
||||
newBuffer.append(type);
|
||||
newBuffer.append("\"");
|
||||
}
|
||||
|
||||
return newBuffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected StringBuffer getSpecificMatchQuery(List<JsonQueryERElement> childrenBreadcrumb)
|
||||
throws SchemaException, ResourceRegistryException {
|
||||
StringBuffer newBuffer = new StringBuffer();
|
||||
|
||||
boolean traverseBack = this.traverseBack;
|
||||
if(jsonNode.has(_SOURCE)) {
|
||||
if(!entryPoint) {
|
||||
throw new InvalidQueryException(_SOURCE + " property cannot be used in a facet if it is not the entry object");
|
||||
}
|
||||
traverseBack = true;
|
||||
|
||||
JsonNode consistsOfNode = jsonNode.get(_SOURCE);
|
||||
JsonQueryConsistsOf jsonQueryConsistsOf = new JsonQueryConsistsOf(consistsOfNode);
|
||||
jsonQueryConsistsOf.setTraverseBack(traverseBack);
|
||||
jsonQueryConsistsOf.setDirection(Direction.IN);
|
||||
jsonQueryConsistsOf.setBreadcrumb(childrenBreadcrumb);
|
||||
newBuffer = jsonQueryConsistsOf.createMatchQuery(newBuffer);
|
||||
|
||||
newBuffer.append("\n\t");
|
||||
newBuffer.append(".inV('");
|
||||
newBuffer.append(type);
|
||||
newBuffer.append("')");
|
||||
newBuffer.append(" { where: ($matched.");
|
||||
newBuffer.append(alias);
|
||||
newBuffer.append(" == $currentMatch)}");
|
||||
|
||||
traverseBack = false;
|
||||
|
||||
/* Need to substract 1 from size otherwise
|
||||
* it add WHERE at the end because _in
|
||||
* is not a property to be used for a WHERE compare
|
||||
*/
|
||||
--size;
|
||||
}
|
||||
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
|
||||
if(!entryPoint) {
|
||||
buffer.append("\n\t");
|
||||
buffer.append(".inV('");
|
||||
buffer.append(type);
|
||||
buffer.append("')");
|
||||
|
||||
|
||||
alias = getAlias(true);
|
||||
StringBuffer sb = null;
|
||||
if(size > 0) {
|
||||
sb = addConstraints(jsonNode, null, null);
|
||||
}
|
||||
|
||||
buffer.append(" {");
|
||||
buffer.append(" as: ");
|
||||
buffer.append(alias);
|
||||
buffer.append(",");
|
||||
buffer.append(" where: ");
|
||||
if(sb!=null && sb.length()>0) {
|
||||
buffer.append("(");
|
||||
}
|
||||
buffer.append("($currentMatch['@class'] INSTANCEOF '");
|
||||
buffer.append(type);
|
||||
buffer.append("')");
|
||||
|
||||
if(sb!=null && sb.length()>0) {
|
||||
buffer.append(" AND (");
|
||||
buffer.append(sb);
|
||||
buffer.append(")");
|
||||
buffer.append(")");
|
||||
}
|
||||
|
||||
buffer.append("}");
|
||||
|
||||
}
|
||||
|
||||
buffer.append(newBuffer);
|
||||
|
||||
// if(traverseBack) {
|
||||
// buffer.append("\n\t");
|
||||
// buffer.append(".inV('");
|
||||
// buffer.append(type);
|
||||
// buffer.append("')");
|
||||
// buffer.append(" { where: ($matched.");
|
||||
// buffer.append(alias);
|
||||
// buffer.append(" == $currentMatch)}");
|
||||
// }
|
||||
|
||||
return buffer;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.json.base.entities;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import org.gcube.informationsystem.base.reference.AccessType;
|
||||
|
@ -7,8 +9,10 @@ import org.gcube.informationsystem.base.reference.Direction;
|
|||
import org.gcube.informationsystem.model.reference.entities.Resource;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegistryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaException;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.JsonQueryERElement;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.relations.JsonQueryConsistsOf;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.relations.JsonQueryIsRelatedTo;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.OrientDBUtility;
|
||||
|
||||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
|
@ -17,42 +21,74 @@ public class JsonQueryResource extends JsonQueryEntity {
|
|||
|
||||
public JsonQueryResource(JsonNode jsonQuery) throws SchemaException, ResourceRegistryException {
|
||||
super(jsonQuery, AccessType.RESOURCE);
|
||||
fieldNamesToRemove.add(Resource.CONSISTS_OF_PROPERTY);
|
||||
fieldNamesToRemove.add(Resource.IS_RELATED_TO_PROPERTY);
|
||||
this.fieldNamesToRemove.add(Resource.CONSISTS_OF_PROPERTY);
|
||||
this.fieldNamesToRemove.add(Resource.IS_RELATED_TO_PROPERTY);
|
||||
}
|
||||
|
||||
public StringBuffer createSelect(StringBuffer stringBuffer, boolean wrapInnerQuery) throws SchemaException, ResourceRegistryException {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("SELECT FROM ");
|
||||
|
||||
if(wrapInnerQuery) {
|
||||
buffer.append("( ");
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
}else {
|
||||
buffer.append(type);
|
||||
}
|
||||
|
||||
if(entryPoint || size>1) {
|
||||
buffer.append(" WHERE ");
|
||||
}
|
||||
|
||||
if(size > 1) {
|
||||
buffer.append(addConstraints(jsonNode, null, null));
|
||||
if(entryPoint) {
|
||||
buffer.append(" AND ");
|
||||
}
|
||||
}
|
||||
|
||||
if(entryPoint) {
|
||||
buffer.append(OrientDBUtility.ORIENTDB_CLASS_PROPERTY);
|
||||
buffer.append(" INSTANCEOF \"");
|
||||
buffer.append(type);
|
||||
buffer.append("\"");
|
||||
}
|
||||
|
||||
return buffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StringBuffer analize(StringBuffer stringBuffer) throws SchemaException, ResourceRegistryException {
|
||||
boolean initFound = false;
|
||||
public StringBuffer createTraversalQuery(StringBuffer stringBuffer) throws SchemaException, ResourceRegistryException {
|
||||
|
||||
int size = jsonNode.size();
|
||||
boolean wrapInnerQuery = false;
|
||||
|
||||
if(!entryPoint) {
|
||||
StringBuffer newBuffer = new StringBuffer();
|
||||
newBuffer.append("TRAVERSE ");
|
||||
newBuffer.append(direction.name().toLowerCase());
|
||||
newBuffer.append("V(\"");
|
||||
newBuffer.append(type);
|
||||
newBuffer.append("\") FROM ( ");
|
||||
newBuffer.append(stringBuffer);
|
||||
newBuffer.append(")");
|
||||
stringBuffer = newBuffer;
|
||||
if(traverseBack) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("TRAVERSE ");
|
||||
buffer.append(direction.name().toLowerCase());
|
||||
buffer.append("V(\"");
|
||||
buffer.append(type);
|
||||
buffer.append("\") FROM ( ");
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
stringBuffer = buffer;
|
||||
|
||||
initFound = true;
|
||||
wrapInnerQuery = true;
|
||||
}
|
||||
|
||||
ArrayNode isRelatedToArray = (ArrayNode) jsonNode.get(Resource.IS_RELATED_TO_PROPERTY);
|
||||
if(isRelatedToArray!=null && isRelatedToArray.size()>0) {
|
||||
--size;
|
||||
initFound = true;
|
||||
for(int i=0; i<isRelatedToArray.size(); i++) {
|
||||
JsonNode isRelatedToJsonNode = isRelatedToArray.get(i);
|
||||
JsonQueryIsRelatedTo jsonQueryIsRelatedTo = new JsonQueryIsRelatedTo(isRelatedToJsonNode);
|
||||
jsonQueryIsRelatedTo.setRequestedResourceType(type);
|
||||
jsonQueryIsRelatedTo.setEntryPoint(entryPoint && i==0);
|
||||
stringBuffer = jsonQueryIsRelatedTo.analize(stringBuffer);
|
||||
jsonQueryIsRelatedTo.setDirectionByJson();
|
||||
jsonQueryIsRelatedTo.setTraverseBack( (!(!traverseBack) && i==0) );
|
||||
stringBuffer = jsonQueryIsRelatedTo.createTraversalQuery(stringBuffer);
|
||||
}
|
||||
|
||||
wrapInnerQuery = true;
|
||||
}
|
||||
|
||||
ArrayNode consistsOfArray = (ArrayNode) jsonNode.get(Resource.CONSISTS_OF_PROPERTY);
|
||||
|
@ -63,45 +99,154 @@ public class JsonQueryResource extends JsonQueryEntity {
|
|||
JsonQueryConsistsOf jsonQueryConsistsOf = new JsonQueryConsistsOf(consistsOfJsonNode);
|
||||
jsonQueryConsistsOf.setRequestedResourceType(type);
|
||||
jsonQueryConsistsOf.setDirection(Direction.IN);
|
||||
jsonQueryConsistsOf.setEntryPoint(entryPoint && !initFound && i==0);
|
||||
stringBuffer = jsonQueryConsistsOf.analize(stringBuffer);
|
||||
jsonQueryConsistsOf.setTraverseBack(!((!traverseBack) && !wrapInnerQuery && i==0));
|
||||
stringBuffer = jsonQueryConsistsOf.createTraversalQuery(stringBuffer);
|
||||
}
|
||||
initFound = true; // Must be set after the cycle and not before
|
||||
wrapInnerQuery = true; // Must be set after the cycle and not before
|
||||
}
|
||||
|
||||
if(entryPoint) {
|
||||
if(!initFound) {
|
||||
stringBuffer = new StringBuffer();
|
||||
stringBuffer.append("SELECT FROM ");
|
||||
stringBuffer.append(type);
|
||||
if(size > 1) {
|
||||
stringBuffer.append(" WHERE ");
|
||||
stringBuffer.append(addConstraints(jsonNode, null, null));
|
||||
}
|
||||
}else {
|
||||
if(size > 1) {
|
||||
StringBuffer newBuffer = new StringBuffer();
|
||||
newBuffer.append("SELECT FROM ( ");
|
||||
newBuffer.append(stringBuffer);
|
||||
newBuffer.append(")");
|
||||
newBuffer.append(" WHERE ");
|
||||
newBuffer.append(addConstraints(jsonNode, null, null));
|
||||
stringBuffer = newBuffer;
|
||||
}
|
||||
}
|
||||
}else {
|
||||
if(initFound && size > 1) {
|
||||
StringBuffer newBuffer = new StringBuffer();
|
||||
newBuffer.append("SELECT FROM ( ");
|
||||
newBuffer.append(stringBuffer);
|
||||
newBuffer.append(")");
|
||||
newBuffer.append(" WHERE ");
|
||||
newBuffer.append(addConstraints(jsonNode, null, null));
|
||||
stringBuffer = newBuffer;
|
||||
}
|
||||
// The Resource has no other referenced ER inside
|
||||
if(!wrapInnerQuery) {
|
||||
return createSelect(stringBuffer, wrapInnerQuery);
|
||||
}
|
||||
|
||||
if(entryPoint || size>1) {
|
||||
return createSelect(stringBuffer, wrapInnerQuery);
|
||||
}
|
||||
|
||||
return stringBuffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected StringBuffer getSpecificMatchQuery(List<JsonQueryERElement> childrenBreadcrumb) throws SchemaException, ResourceRegistryException {
|
||||
|
||||
StringBuffer newBuffer = new StringBuffer();
|
||||
|
||||
int isRelatedToSize = 0;
|
||||
|
||||
ArrayNode consistsOfArray = (ArrayNode) jsonNode.get(Resource.CONSISTS_OF_PROPERTY);
|
||||
int consistsOfSize = 0;
|
||||
if(consistsOfArray!=null) {
|
||||
consistsOfSize = consistsOfArray.size();
|
||||
}
|
||||
|
||||
int total = consistsOfSize;
|
||||
|
||||
ArrayNode isRelatedToArray = (ArrayNode) jsonNode.get(Resource.IS_RELATED_TO_PROPERTY);
|
||||
if(isRelatedToArray!=null && isRelatedToArray.size()>0) {
|
||||
--size;
|
||||
isRelatedToSize = isRelatedToArray.size();
|
||||
total += isRelatedToSize;
|
||||
|
||||
for(int i=0; i<isRelatedToSize; i++) {
|
||||
JsonNode isRelatedToJsonNode = isRelatedToArray.get(i);
|
||||
JsonQueryIsRelatedTo jsonQueryIsRelatedTo = new JsonQueryIsRelatedTo(isRelatedToJsonNode);
|
||||
jsonQueryIsRelatedTo.setRequestedResourceType(type);
|
||||
jsonQueryIsRelatedTo.setDirectionByJson(true);
|
||||
jsonQueryIsRelatedTo.setBreadcrumb(childrenBreadcrumb);
|
||||
jsonQueryIsRelatedTo.setPosition(i);
|
||||
|
||||
boolean traverseBack = true;
|
||||
// boolean traverseBack = false;
|
||||
// if(i<(isRelatedToSize-1) || consistsOfSize>0) {
|
||||
// traverseBack = true;
|
||||
// }
|
||||
jsonQueryIsRelatedTo.setTraverseBack(traverseBack);
|
||||
|
||||
newBuffer = jsonQueryIsRelatedTo.createMatchQuery(newBuffer);
|
||||
|
||||
if(traverseBack) {
|
||||
newBuffer.append("\n\t.");
|
||||
newBuffer.append(jsonQueryIsRelatedTo.getDirection().name().toLowerCase());
|
||||
newBuffer.append("V('");
|
||||
newBuffer.append(type);
|
||||
newBuffer.append("') ");
|
||||
newBuffer.append("{ where: ($matched.");
|
||||
newBuffer.append(getAlias(true));
|
||||
newBuffer.append(" == $currentMatch)}");
|
||||
|
||||
if(entryPoint && i<(total-1)) {
|
||||
newBuffer.append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(consistsOfSize>0) {
|
||||
--size;
|
||||
for(int i=0; i<consistsOfArray.size(); i++) {
|
||||
JsonNode consistsOfJsonNode = consistsOfArray.get(i);
|
||||
JsonQueryConsistsOf jsonQueryConsistsOf = new JsonQueryConsistsOf(consistsOfJsonNode);
|
||||
jsonQueryConsistsOf.setRequestedResourceType(type);
|
||||
jsonQueryConsistsOf.setDirection(Direction.OUT);
|
||||
jsonQueryConsistsOf.setBreadcrumb(childrenBreadcrumb);
|
||||
jsonQueryConsistsOf.setPosition(isRelatedToSize+i);
|
||||
|
||||
boolean traverseBack = true;
|
||||
jsonQueryConsistsOf.setTraverseBack(traverseBack);
|
||||
|
||||
newBuffer = jsonQueryConsistsOf.createMatchQuery(newBuffer);
|
||||
|
||||
if(traverseBack) {
|
||||
newBuffer.append("\n\t.");
|
||||
newBuffer.append(jsonQueryConsistsOf.getDirection().name().toLowerCase());
|
||||
newBuffer.append("V('");
|
||||
newBuffer.append(type);
|
||||
newBuffer.append("')");
|
||||
newBuffer.append(" { where: ($matched.");
|
||||
newBuffer.append(getAlias(true));
|
||||
newBuffer.append(" == $currentMatch)}");
|
||||
if(entryPoint && i<(consistsOfSize-1)) {
|
||||
newBuffer.append("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
|
||||
if(!entryPoint) {
|
||||
buffer.append("\n\t");
|
||||
buffer.append(".");
|
||||
buffer.append(direction.name().toLowerCase());
|
||||
buffer.append("V('");
|
||||
buffer.append(type);
|
||||
buffer.append("')");
|
||||
|
||||
alias = getAlias(true);
|
||||
StringBuffer sb = null;
|
||||
if(size > 0) {
|
||||
sb = addConstraints(jsonNode, null, null);
|
||||
}
|
||||
|
||||
buffer.append(" {");
|
||||
buffer.append(" as: ");
|
||||
buffer.append(alias);
|
||||
buffer.append(",");
|
||||
buffer.append(" where: ");
|
||||
if(sb!=null && sb.length()>0) {
|
||||
buffer.append("(");
|
||||
}
|
||||
buffer.append("($currentMatch['@class'] INSTANCEOF '");
|
||||
buffer.append(type);
|
||||
buffer.append("')");
|
||||
|
||||
if(sb!=null && sb.length()>0) {
|
||||
buffer.append(" AND (");
|
||||
buffer.append(sb);
|
||||
buffer.append(")");
|
||||
buffer.append(")");
|
||||
}
|
||||
|
||||
buffer.append("}");
|
||||
}
|
||||
|
||||
buffer.append(newBuffer);
|
||||
if(entryPoint) {
|
||||
buffer.append("\n");
|
||||
}
|
||||
|
||||
return buffer;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,13 +1,17 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.json.base.relations;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.informationsystem.base.reference.AccessType;
|
||||
import org.gcube.informationsystem.base.reference.Direction;
|
||||
import org.gcube.informationsystem.model.reference.relations.ConsistsOf;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegistryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaException;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.JsonQueryERElement;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.entities.JsonQueryFacet;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.entities.JsonQueryResource;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.OrientDBUtility;
|
||||
|
||||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
|
@ -29,30 +33,58 @@ public class JsonQueryConsistsOf extends JsonQueryRelation {
|
|||
this.requestedResourceType = requestedResourceType;
|
||||
}
|
||||
|
||||
protected StringBuffer traverseBackToCallerResource(StringBuffer stringBuffer) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("TRAVERSE ");
|
||||
buffer.append(direction.opposite().name().toLowerCase());
|
||||
buffer.append("V(\"");
|
||||
buffer.append(requestedResourceType);
|
||||
buffer.append("\") FROM ( "); // Open (
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")"); // Close )
|
||||
return buffer;
|
||||
}
|
||||
|
||||
|
||||
public StringBuffer createSelect(StringBuffer stringBuffer, boolean wrapInnerQuery) throws SchemaException, ResourceRegistryException {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("SELECT FROM ");
|
||||
|
||||
if(wrapInnerQuery) {
|
||||
buffer.append("( ");
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
}else {
|
||||
buffer.append(type);
|
||||
}
|
||||
|
||||
if(entryPoint || size>1) {
|
||||
buffer.append(" WHERE ");
|
||||
}
|
||||
|
||||
if(size > 1) {
|
||||
buffer.append(addConstraints(jsonNode, null, null));
|
||||
if(entryPoint) {
|
||||
buffer.append(" AND ");
|
||||
}
|
||||
}
|
||||
|
||||
if(entryPoint) {
|
||||
buffer.append(OrientDBUtility.ORIENTDB_CLASS_PROPERTY);
|
||||
buffer.append(" INSTANCEOF \"");
|
||||
buffer.append(type);
|
||||
buffer.append("\"");
|
||||
}
|
||||
|
||||
return buffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StringBuffer analize(StringBuffer stringBuffer) throws SchemaException, ResourceRegistryException {
|
||||
StringBuffer consistsOfBuffer = new StringBuffer();
|
||||
public StringBuffer createTraversalQuery(StringBuffer stringBuffer) throws SchemaException, ResourceRegistryException {
|
||||
|
||||
if(!jsonNode.has(ConsistsOf.SOURCE_PROPERTY)) {
|
||||
consistsOfBuffer.append("TRAVERSE ");
|
||||
consistsOfBuffer.append(direction.opposite().name().toLowerCase());
|
||||
consistsOfBuffer.append("V(\"");
|
||||
consistsOfBuffer.append(requestedResourceType);
|
||||
consistsOfBuffer.append("\") FROM ( "); // Open ( 1
|
||||
}
|
||||
boolean wrapInnerQuery = false;
|
||||
|
||||
int size = jsonNode.size();
|
||||
if(size > 2) {
|
||||
consistsOfBuffer.append("SELECT FROM ( "); // Open ( SELECT
|
||||
}
|
||||
|
||||
consistsOfBuffer.append("TRAVERSE ");
|
||||
consistsOfBuffer.append(direction.name().toLowerCase());
|
||||
consistsOfBuffer.append("E(\"");
|
||||
consistsOfBuffer.append(type);
|
||||
consistsOfBuffer.append("\") FROM ( "); // Open ( 2
|
||||
|
||||
if(!entryPoint) {
|
||||
if(traverseBack) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("TRAVERSE ");
|
||||
buffer.append(direction.opposite().name().toLowerCase());
|
||||
|
@ -62,34 +94,162 @@ public class JsonQueryConsistsOf extends JsonQueryRelation {
|
|||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
stringBuffer = buffer;
|
||||
wrapInnerQuery = true;
|
||||
}
|
||||
|
||||
if(jsonNode.has(ConsistsOf.TARGET_PROPERTY)) {
|
||||
--size;
|
||||
JsonNode facetJsonNode = jsonNode.get(ConsistsOf.TARGET_PROPERTY);
|
||||
JsonQueryFacet jsonQueryFacet = new JsonQueryFacet(facetJsonNode);
|
||||
jsonQueryFacet.setEntryPoint(entryPoint);
|
||||
stringBuffer = jsonQueryFacet.analize(stringBuffer);
|
||||
} else if(jsonNode.has(ConsistsOf.SOURCE_PROPERTY)) {
|
||||
jsonQueryFacet.setTraverseBack(!((!traverseBack) && !wrapInnerQuery));
|
||||
stringBuffer = jsonQueryFacet.createTraversalQuery(stringBuffer);
|
||||
wrapInnerQuery = true;
|
||||
}
|
||||
|
||||
if(jsonNode.has(ConsistsOf.SOURCE_PROPERTY)) {
|
||||
--size;
|
||||
JsonNode resourceJsonNode = jsonNode.get(ConsistsOf.SOURCE_PROPERTY);
|
||||
JsonQueryResource jsonQueryResource = new JsonQueryResource(resourceJsonNode);
|
||||
jsonQueryResource.setEntryPoint(entryPoint);
|
||||
stringBuffer = jsonQueryResource.analize(stringBuffer);
|
||||
jsonQueryResource.setTraverseBack(!((!traverseBack) && !wrapInnerQuery));
|
||||
stringBuffer = jsonQueryResource.createTraversalQuery(stringBuffer);
|
||||
wrapInnerQuery = true;
|
||||
}
|
||||
|
||||
consistsOfBuffer.append(stringBuffer);
|
||||
consistsOfBuffer.append(")"); // Close ) 2
|
||||
|
||||
// Size 2 means that only 'type' and 'target' properties are present
|
||||
if(size > 2) {
|
||||
consistsOfBuffer.append(") WHERE "); // Close ) SELECT
|
||||
consistsOfBuffer.append(addConstraints(jsonNode, null, null));
|
||||
if(wrapInnerQuery) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("TRAVERSE ");
|
||||
buffer.append(direction.name().toLowerCase());
|
||||
buffer.append("E(\"");
|
||||
buffer.append(type);
|
||||
buffer.append("\") FROM ( ");
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
stringBuffer = buffer;
|
||||
}
|
||||
|
||||
if(!jsonNode.has(ConsistsOf.SOURCE_PROPERTY)) {
|
||||
consistsOfBuffer.append(")"); // Close ) 1
|
||||
if(entryPoint || size>1) {
|
||||
stringBuffer = createSelect(stringBuffer, wrapInnerQuery);
|
||||
}
|
||||
|
||||
return consistsOfBuffer;
|
||||
if(!entryPoint && requestedResourceType!=null) {
|
||||
stringBuffer = traverseBackToCallerResource(stringBuffer);
|
||||
}
|
||||
|
||||
return stringBuffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected StringBuffer getSpecificMatchQuery(List<JsonQueryERElement> childrenBreadcrumb)
|
||||
throws SchemaException, ResourceRegistryException {
|
||||
int childrenPosition = 0;
|
||||
|
||||
boolean traverseBack = this.traverseBack;
|
||||
|
||||
StringBuffer newBuffer = new StringBuffer();
|
||||
if(jsonNode.has(ConsistsOf.TARGET_PROPERTY)) {
|
||||
--size;
|
||||
JsonNode facetJsonNode = jsonNode.get(ConsistsOf.TARGET_PROPERTY);
|
||||
JsonQueryFacet jsonQueryFacet = new JsonQueryFacet(facetJsonNode);
|
||||
jsonQueryFacet.setBreadcrumb(childrenBreadcrumb);
|
||||
jsonQueryFacet.setPosition(childrenPosition++);
|
||||
jsonQueryFacet.setTraverseBack(true);
|
||||
Direction direction = Direction.IN;
|
||||
jsonQueryFacet.setDirection(direction);
|
||||
newBuffer = jsonQueryFacet.createMatchQuery(newBuffer);
|
||||
|
||||
newBuffer.append("\n\t");
|
||||
newBuffer.append(".");
|
||||
newBuffer.append(direction.name().toLowerCase());
|
||||
newBuffer.append("E('");
|
||||
newBuffer.append(type);
|
||||
newBuffer.append("') ");
|
||||
newBuffer.append(" { where: ($matched.");
|
||||
newBuffer.append(getAlias(true));
|
||||
newBuffer.append(" == $currentMatch)}");
|
||||
|
||||
traverseBack = false;
|
||||
}
|
||||
|
||||
if(jsonNode.has(ConsistsOf.SOURCE_PROPERTY)) {
|
||||
--size;
|
||||
JsonNode resourceJsonNode = jsonNode.get(ConsistsOf.SOURCE_PROPERTY);
|
||||
JsonQueryResource jsonQueryResource = new JsonQueryResource(resourceJsonNode);
|
||||
jsonQueryResource.setBreadcrumb(childrenBreadcrumb);
|
||||
jsonQueryResource.setPosition(childrenPosition++);
|
||||
jsonQueryResource.setTraverseBack(true);
|
||||
Direction direction = Direction.OUT;
|
||||
jsonQueryResource.setDirection(direction);
|
||||
newBuffer = jsonQueryResource.createMatchQuery(newBuffer);
|
||||
|
||||
newBuffer.append("\n\t");
|
||||
newBuffer.append(".");
|
||||
newBuffer.append(direction.name().toLowerCase());
|
||||
newBuffer.append("E('");
|
||||
newBuffer.append(type);
|
||||
newBuffer.append("') ");
|
||||
newBuffer.append(" { where: ($matched.");
|
||||
newBuffer.append(getAlias(true));
|
||||
newBuffer.append(" == $currentMatch)}");
|
||||
|
||||
traverseBack = false;
|
||||
}
|
||||
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
|
||||
if(!entryPoint) {
|
||||
buffer.append("\n\t");
|
||||
buffer.append(".");
|
||||
buffer.append(direction.name().toLowerCase());
|
||||
buffer.append("E('");
|
||||
buffer.append(type);
|
||||
buffer.append("')");
|
||||
|
||||
|
||||
alias = getAlias(true);
|
||||
StringBuffer sb = null;
|
||||
if(size > 0) {
|
||||
sb = addConstraints(jsonNode, null, null);
|
||||
}
|
||||
|
||||
buffer.append(" {");
|
||||
buffer.append(" as: ");
|
||||
buffer.append(alias);
|
||||
buffer.append(",");
|
||||
buffer.append(" where: ");
|
||||
if(sb!=null && sb.length()>0) {
|
||||
buffer.append("(");
|
||||
}
|
||||
buffer.append("($currentMatch['@class'] INSTANCEOF '");
|
||||
buffer.append(type);
|
||||
buffer.append("')");
|
||||
|
||||
if(sb!=null && sb.length()>0) {
|
||||
buffer.append(" AND (");
|
||||
buffer.append(sb);
|
||||
buffer.append(")");
|
||||
buffer.append(")");
|
||||
}
|
||||
|
||||
buffer.append("}");
|
||||
|
||||
|
||||
}
|
||||
|
||||
buffer.append(newBuffer);
|
||||
|
||||
if(traverseBack) {
|
||||
buffer.append("\n\t");
|
||||
buffer.append(".");
|
||||
buffer.append(direction.opposite().name().toLowerCase());
|
||||
buffer.append("E('");
|
||||
buffer.append(type);
|
||||
buffer.append("') ");
|
||||
buffer.append(" { where: ($matched.");
|
||||
buffer.append(alias);
|
||||
buffer.append(" == $currentMatch)}");
|
||||
}
|
||||
|
||||
return buffer;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,13 +1,20 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.json.base.relations;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import javax.ws.rs.InternalServerErrorException;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.informationsystem.base.reference.AccessType;
|
||||
import org.gcube.informationsystem.base.reference.Direction;
|
||||
import org.gcube.informationsystem.model.reference.relations.ConsistsOf;
|
||||
import org.gcube.informationsystem.model.reference.relations.IsRelatedTo;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegistryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.queries.InvalidQueryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaException;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.JsonQueryERElement;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.base.entities.JsonQueryResource;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.OrientDBUtility;
|
||||
|
||||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
|
@ -18,6 +25,7 @@ public class JsonQueryIsRelatedTo extends JsonQueryRelation {
|
|||
|
||||
public JsonQueryIsRelatedTo(JsonNode jsonQuery) throws SchemaException, ResourceRegistryException {
|
||||
super(jsonQuery, AccessType.IS_RELATED_TO);
|
||||
direction = null;
|
||||
}
|
||||
|
||||
public String getRequestedResourceType() {
|
||||
|
@ -28,38 +36,105 @@ public class JsonQueryIsRelatedTo extends JsonQueryRelation {
|
|||
this.requestedResourceType = requestedResourceType;
|
||||
}
|
||||
|
||||
private StringBuffer traverseThisEdge(StringBuffer stringBuffer) throws InvalidQueryException {
|
||||
protected StringBuffer traverseBackToCallerResource(StringBuffer stringBuffer) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("TRAVERSE ");
|
||||
buffer.append(direction.opposite().name().toLowerCase());
|
||||
buffer.append("V(\"");
|
||||
buffer.append(requestedResourceType);
|
||||
buffer.append("\") FROM ( "); // Open (
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")"); // Close )
|
||||
return buffer;
|
||||
}
|
||||
|
||||
int size = jsonNode.size();
|
||||
public StringBuffer createSelect(StringBuffer stringBuffer, boolean wrapInnerQuery) throws SchemaException, ResourceRegistryException {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("SELECT FROM ");
|
||||
|
||||
// Remove type from size
|
||||
--size;
|
||||
if(wrapInnerQuery) {
|
||||
buffer.append("( ");
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
}else {
|
||||
buffer.append(type);
|
||||
}
|
||||
|
||||
if(entryPoint || size>1) {
|
||||
buffer.append(" WHERE ");
|
||||
}
|
||||
|
||||
if(size > 1) {
|
||||
buffer.append(addConstraints(jsonNode, null, null));
|
||||
if(entryPoint) {
|
||||
buffer.append(" AND ");
|
||||
}
|
||||
}
|
||||
|
||||
if(entryPoint) {
|
||||
buffer.append(OrientDBUtility.ORIENTDB_CLASS_PROPERTY);
|
||||
buffer.append(" INSTANCEOF \"");
|
||||
buffer.append(type);
|
||||
buffer.append("\"");
|
||||
}
|
||||
|
||||
return buffer;
|
||||
}
|
||||
|
||||
public void setDirectionByJson() throws InvalidQueryException {
|
||||
setDirectionByJson(false);
|
||||
}
|
||||
|
||||
public void setDirectionByJson(boolean matchQuery) throws InvalidQueryException {
|
||||
if(entryPoint) {
|
||||
String error = "The function JsonQueryIsRelatedTo#setDirectionByJson() cannot be called for an entry point";
|
||||
logger.error(error);
|
||||
throw new InternalServerErrorException(error);
|
||||
}
|
||||
|
||||
boolean found = false;
|
||||
|
||||
if(jsonNode.has(IsRelatedTo.SOURCE_PROPERTY)) {
|
||||
--size;
|
||||
logger.trace("{} for type {} has {} property", IsRelatedTo.NAME, type, IsRelatedTo.SOURCE_PROPERTY);
|
||||
direction = Direction.OUT;
|
||||
found = true;
|
||||
}
|
||||
|
||||
if(jsonNode.has(IsRelatedTo.TARGET_PROPERTY)) {
|
||||
--size;
|
||||
if(found) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append(IsRelatedTo.NAME);
|
||||
buffer.append(" for type ");
|
||||
buffer.append(type);
|
||||
buffer.append(" has both ");
|
||||
buffer.append(IsRelatedTo.SOURCE_PROPERTY);
|
||||
buffer.append(" and ");
|
||||
buffer.append(IsRelatedTo.TARGET_PROPERTY);
|
||||
buffer.append(" property. Only entry points can have both because one is implicit from the resource containg the ");
|
||||
buffer.append(IsRelatedTo.NAME);
|
||||
buffer.append(" relation.");
|
||||
logger.error("This part of the json query is not valid {}\n{}", jsonNode.toString(), buffer.toString());
|
||||
throw new InvalidQueryException(buffer.toString());
|
||||
}
|
||||
direction = Direction.IN;
|
||||
}
|
||||
|
||||
if(size > 0) {
|
||||
buffer.append("SELECT FROM ");
|
||||
if(entryPoint) {
|
||||
buffer.append(type);
|
||||
}else {
|
||||
buffer.append(" ( "); // Open ( SELECT
|
||||
}
|
||||
if(matchQuery) {
|
||||
direction = direction.opposite();
|
||||
}
|
||||
}
|
||||
|
||||
}else {
|
||||
if(entryPoint) {
|
||||
buffer.append("SELECT FROM ");
|
||||
buffer.append(type);
|
||||
}
|
||||
@Override
|
||||
public StringBuffer createTraversalQuery(StringBuffer stringBuffer) throws SchemaException, ResourceRegistryException {
|
||||
|
||||
if(!entryPoint && direction==null) {
|
||||
throw new InternalServerErrorException("Caller Resource must invoke setDirectionByJson() first. This is a server bug. Please contact the administator. ");
|
||||
}
|
||||
|
||||
if(!entryPoint) {
|
||||
boolean wrapInnerQuery = false;
|
||||
|
||||
if(traverseBack) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("TRAVERSE ");
|
||||
buffer.append(direction.opposite().name().toLowerCase());
|
||||
buffer.append("E(\"");
|
||||
|
@ -67,84 +142,188 @@ public class JsonQueryIsRelatedTo extends JsonQueryRelation {
|
|||
buffer.append("\") FROM ( ");
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
stringBuffer = buffer;
|
||||
wrapInnerQuery = true;
|
||||
}
|
||||
|
||||
Direction wrapDirection = direction;
|
||||
|
||||
stringBuffer = buffer;
|
||||
if(jsonNode.has(IsRelatedTo.SOURCE_PROPERTY)) {
|
||||
--size;
|
||||
JsonNode sourceJsonNode = jsonNode.get(IsRelatedTo.SOURCE_PROPERTY);
|
||||
JsonQueryResource jsonQueryResource = new JsonQueryResource(sourceJsonNode);
|
||||
wrapDirection = Direction.OUT;
|
||||
jsonQueryResource.setDirection(Direction.OUT);
|
||||
jsonQueryResource.setTraverseBack(!((!traverseBack) && !wrapInnerQuery));
|
||||
stringBuffer = jsonQueryResource.createTraversalQuery(stringBuffer);
|
||||
wrapInnerQuery = true;
|
||||
}
|
||||
|
||||
// Size 0 means that only 'type' and 'target'/'source' properties are present
|
||||
if(size > 0) {
|
||||
if(!entryPoint) {
|
||||
stringBuffer.append(" )"); // Close ) SELECT
|
||||
if(jsonNode.has(IsRelatedTo.TARGET_PROPERTY)) {
|
||||
if(jsonNode.has(IsRelatedTo.SOURCE_PROPERTY)) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("TRAVERSE ");
|
||||
buffer.append(wrapDirection.name().toLowerCase());
|
||||
buffer.append("E(\"");
|
||||
buffer.append(type);
|
||||
buffer.append("\") FROM ( ");
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
stringBuffer = buffer;
|
||||
}
|
||||
stringBuffer.append(" WHERE ");
|
||||
stringBuffer.append(addConstraints(jsonNode, null, null));
|
||||
|
||||
--size;
|
||||
JsonNode targetJsonNode = jsonNode.get(IsRelatedTo.TARGET_PROPERTY);
|
||||
JsonQueryResource jsonQueryResource = new JsonQueryResource(targetJsonNode);
|
||||
wrapDirection = Direction.IN;
|
||||
jsonQueryResource.setDirection(Direction.IN);
|
||||
jsonQueryResource.setTraverseBack(!((!traverseBack) && !wrapInnerQuery));
|
||||
stringBuffer = jsonQueryResource.createTraversalQuery(stringBuffer);
|
||||
wrapInnerQuery = true;
|
||||
}
|
||||
|
||||
if(wrapInnerQuery) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
buffer.append("TRAVERSE ");
|
||||
buffer.append(wrapDirection.name().toLowerCase());
|
||||
buffer.append("E(\"");
|
||||
buffer.append(type);
|
||||
buffer.append("\") FROM ( ");
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
stringBuffer = buffer;
|
||||
}
|
||||
|
||||
if(entryPoint || size>1) {
|
||||
stringBuffer = createSelect(stringBuffer, wrapInnerQuery);
|
||||
}
|
||||
|
||||
if(!entryPoint) {
|
||||
stringBuffer = traverseBackToCallerResource(stringBuffer);
|
||||
}
|
||||
|
||||
return stringBuffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StringBuffer analize(StringBuffer stringBuffer) throws SchemaException, ResourceRegistryException {
|
||||
protected StringBuffer getSpecificMatchQuery(List<JsonQueryERElement> childrenBreadcrumb) throws SchemaException, ResourceRegistryException {
|
||||
|
||||
JsonNode sourceJsonNode = jsonNode.get(IsRelatedTo.SOURCE_PROPERTY);
|
||||
JsonNode targetJsonNode = jsonNode.get(IsRelatedTo.TARGET_PROPERTY);
|
||||
|
||||
JsonNode resourceJsonNode = null;
|
||||
|
||||
if(sourceJsonNode!=null) {
|
||||
resourceJsonNode = sourceJsonNode;
|
||||
direction = Direction.OUT;
|
||||
} else if(targetJsonNode!=null) {
|
||||
resourceJsonNode = targetJsonNode;
|
||||
direction = Direction.IN;
|
||||
if(!entryPoint && direction==null) {
|
||||
throw new InternalServerErrorException("Caller Resource must invoke setDirectionByJson() first. This is a server bug. Please contact the administator. ");
|
||||
}
|
||||
|
||||
stringBuffer = traverseThisEdge(stringBuffer);
|
||||
int childrenPosition = 0;
|
||||
|
||||
JsonQueryResource jsonQueryResource = new JsonQueryResource(resourceJsonNode);
|
||||
jsonQueryResource.setDirection(direction);
|
||||
jsonQueryResource.setEntryPoint(false);
|
||||
stringBuffer = jsonQueryResource.analize(stringBuffer);
|
||||
boolean traverseBack = this.traverseBack;
|
||||
|
||||
StringBuffer newBuffer = new StringBuffer();
|
||||
if(jsonNode.has(ConsistsOf.TARGET_PROPERTY)) {
|
||||
--size;
|
||||
JsonNode targetJsonNode = jsonNode.get(IsRelatedTo.TARGET_PROPERTY);
|
||||
JsonQueryResource jsonQueryResource = new JsonQueryResource(targetJsonNode);
|
||||
Direction direction = Direction.IN;
|
||||
jsonQueryResource.setDirection(direction);
|
||||
jsonQueryResource.setBreadcrumb(childrenBreadcrumb);
|
||||
jsonQueryResource.setPosition(childrenPosition++);
|
||||
jsonQueryResource.setTraverseBack(true);
|
||||
newBuffer = jsonQueryResource.createMatchQuery(newBuffer);
|
||||
|
||||
newBuffer.append("\n\t");
|
||||
newBuffer.append(".");
|
||||
newBuffer.append(direction.name().toLowerCase());
|
||||
newBuffer.append("E('");
|
||||
newBuffer.append(type);
|
||||
newBuffer.append("') ");
|
||||
newBuffer.append(" { where: ($matched.");
|
||||
newBuffer.append(getAlias(true));
|
||||
newBuffer.append(" == $currentMatch)}");
|
||||
|
||||
traverseBack = false;
|
||||
|
||||
}
|
||||
|
||||
if(jsonNode.has(ConsistsOf.SOURCE_PROPERTY)) {
|
||||
--size;
|
||||
JsonNode sourceJsonNode = jsonNode.get(IsRelatedTo.SOURCE_PROPERTY);
|
||||
JsonQueryResource jsonQueryResource = new JsonQueryResource(sourceJsonNode);
|
||||
Direction direction = Direction.OUT;
|
||||
jsonQueryResource.setDirection(direction);
|
||||
jsonQueryResource.setBreadcrumb(childrenBreadcrumb);
|
||||
jsonQueryResource.setPosition(childrenPosition++);
|
||||
jsonQueryResource.setTraverseBack(true);
|
||||
newBuffer = jsonQueryResource.createMatchQuery(newBuffer);
|
||||
|
||||
newBuffer.append("\n\t");
|
||||
newBuffer.append(".");
|
||||
newBuffer.append(direction.name().toLowerCase());
|
||||
newBuffer.append("E('");
|
||||
newBuffer.append(type);
|
||||
newBuffer.append("') ");
|
||||
newBuffer.append(" { where: ($matched.");
|
||||
newBuffer.append(getAlias(true));
|
||||
newBuffer.append(" == $currentMatch)}");
|
||||
|
||||
traverseBack = false;
|
||||
}
|
||||
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
if(requestedResourceType!=null) {
|
||||
buffer.append("TRAVERSE ");
|
||||
|
||||
if(!entryPoint) {
|
||||
buffer.append("\n\t");
|
||||
buffer.append(".");
|
||||
buffer.append(direction.name().toLowerCase());
|
||||
buffer.append("E('");
|
||||
buffer.append(type);
|
||||
buffer.append("')");
|
||||
|
||||
|
||||
alias = getAlias(true);
|
||||
StringBuffer sb = null;
|
||||
if(size > 0) {
|
||||
sb = addConstraints(jsonNode, null, null);
|
||||
}
|
||||
|
||||
buffer.append(" {");
|
||||
buffer.append(" as: ");
|
||||
buffer.append(alias);
|
||||
buffer.append(",");
|
||||
buffer.append(" where: ");
|
||||
if(sb!=null && sb.length()>0) {
|
||||
buffer.append("(");
|
||||
}
|
||||
buffer.append("($currentMatch['@class'] INSTANCEOF '");
|
||||
buffer.append(type);
|
||||
buffer.append("')");
|
||||
|
||||
if(sb!=null && sb.length()>0) {
|
||||
buffer.append(" AND (");
|
||||
buffer.append(sb);
|
||||
buffer.append(")");
|
||||
buffer.append(")");
|
||||
}
|
||||
|
||||
buffer.append("}");
|
||||
|
||||
|
||||
}
|
||||
|
||||
buffer.append(newBuffer);
|
||||
|
||||
if(traverseBack) {
|
||||
buffer.append("\n\t");
|
||||
buffer.append(".");
|
||||
buffer.append(direction.opposite().name().toLowerCase());
|
||||
buffer.append("V(\"");
|
||||
buffer.append(requestedResourceType);
|
||||
buffer.append("\") FROM ( ");
|
||||
}
|
||||
buffer.append("TRAVERSE ");
|
||||
buffer.append(direction.name().toLowerCase());
|
||||
buffer.append("E(\"");
|
||||
buffer.append(type);
|
||||
buffer.append("\") FROM ( ");
|
||||
buffer.append(stringBuffer);
|
||||
buffer.append(")");
|
||||
if(requestedResourceType!=null) {
|
||||
buffer.append(")");
|
||||
}
|
||||
stringBuffer = buffer;
|
||||
|
||||
if(sourceJsonNode!=null && targetJsonNode!=null) {
|
||||
// Target has still to be analised
|
||||
|
||||
jsonQueryResource = new JsonQueryResource(targetJsonNode);
|
||||
jsonQueryResource.setDirection(Direction.IN);
|
||||
jsonQueryResource.setEntryPoint(false);
|
||||
stringBuffer = jsonQueryResource.analize(stringBuffer);
|
||||
|
||||
boolean entryPointOldValue = entryPoint;
|
||||
// It is no more and entry point for the function traverseThisEdge
|
||||
entryPoint = false;
|
||||
stringBuffer = traverseThisEdge(stringBuffer);
|
||||
// Restoring entryPoint indication
|
||||
entryPoint = entryPointOldValue;
|
||||
buffer.append("E('");
|
||||
buffer.append(type);
|
||||
buffer.append("') ");
|
||||
buffer.append(" { where: ($matched.");
|
||||
buffer.append(getAlias(true));
|
||||
buffer.append(" == $currentMatch)}");
|
||||
}
|
||||
|
||||
return stringBuffer;
|
||||
return buffer;
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,118 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.operators;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.commons.lang.NotImplementedException;
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.informationsystem.types.PropertyTypeName.BaseTypeGroup;
|
||||
|
||||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
* See https://www.orientdb.com/docs/3.0.x/sql/SQL-Where.html
|
||||
* https://www.orientdb.com/docs/3.0.x/sql/SQL-Syntax.html#conditions
|
||||
* https://www.orientdb.com/docs/3.0.x/sql/SQL-Syntax.html#comparison-operators
|
||||
* https://www.orientdb.com/docs/3.0.x/sql/SQL-Syntax.html#boolean-operators
|
||||
*/
|
||||
public enum ComparisonOperator {
|
||||
|
||||
EQ("_eq", " = ", 2, BaseTypeGroup.ANY, "Matches values that are equal to a specified value. E.g. `name = 'Luke'`"),
|
||||
GT("_gt", " > ", 2, BaseTypeGroup.ANY, "Matches values that are greater than a specified value. "),
|
||||
GTE("_gte", " >= ", 2, BaseTypeGroup.ANY, "Matches values that are greater than or equal to a specified value."),
|
||||
LT("_lt", " < ", 2, BaseTypeGroup.ANY, "Matches values that are less than a specified value."),
|
||||
LTE("_lte", " <= ", 2, BaseTypeGroup.ANY, "Matches values that are less than or equal to a specified value."),
|
||||
NE("_ne", " <> ", 2, BaseTypeGroup.ANY, "Matches all values that are not equal to a specified value."),
|
||||
BETWEEN("_between", " BETWEEN %s AND %s", 3, BaseTypeGroup.ANY, "Returns TRUE is a value is between two values, eg. 5 BETWEEN 1 AND 10. The value is between a range. E.g. `price BETWEEN 10 AND 30`. It's equivalent to `price >= 10 AND price <= 30`."),
|
||||
IS("_is", " IS ", 2, BaseTypeGroup.ANY, "Used to test if a value is NULL"),
|
||||
|
||||
LIKE("_like", " LIKE ", 2, BaseTypeGroup.STRING, "For strings, checks if a string contains another string. % is used as a wildcard, eg. 'foobar CONTAINS '%ooba%''. Similar to equals, but allow the wildcard '%' that means 'any'. E.g. `name LIKE 'Luk%'`"),
|
||||
CONTAINS_TEXT("_containsText", " CONTAINSTEXT ", 2, BaseTypeGroup.STRING, "The string contains such text. E.g. `text CONTAINSTEXT 'jay'`"),
|
||||
MATCHES("_matches", " MATCHES ", 2, BaseTypeGroup.STRING, "Checks if a string matches a regular expression. Matches the string using a Regular Expression. E.g. `text MATCHES '\b[A-Z0-9.%+-]+@[A-Z0-9.-]+.[A-Z]{2,4}\b'`"),
|
||||
|
||||
IN("_in", " IN ", 2, BaseTypeGroup.COLLECTION, "The same as CONTAINS, but with inverted operands. Matches any of the values specified in an array. E.g. `name in ['European','Asiatic']`"),
|
||||
CONTAINS("_contains", " CONTAINS ", 2, BaseTypeGroup.COLLECTION, "Checks if the left collection contains the right element. The left argument has to be a colleciton, otherwise it returns FALSE. It's NOT the check of colleciton intersections, so ['a', 'b', 'c'] CONTAINS ['a', 'b'] will return FALSE, while ['a', 'b', 'c'] CONTAINS 'a' will return TRUE. True if the collection contains at least one element that satisfy the next condition. Condition can be a single item: in this case the behaviour is like the IN operator. E.g. `children contains (name = 'Luke')` - `map.values() contains (name = 'Luke')`"),
|
||||
CONTAINS_ALL("_containsAll", " CONTAINSALL ", 2, BaseTypeGroup.COLLECTION, "True if all the elements of the collection satisfy the next condition. E.g. `children CONTAINSALL (name = 'Luke')`"),
|
||||
CONTAINS_ANY("_containsAny", " CONTAINSANY ", 2, BaseTypeGroup.COLLECTION, "True if all the elements of the collection satisfy the next condition. E.g. `children CONTAINSANY (name = 'Luke')`"),
|
||||
|
||||
CONTAINS_KEY("_containsKey", " CONTAINSKEY ", 2, BaseTypeGroup.MAP, "For maps, the same as for CONTAINS, but checks on the map keys. True if the map contains at least one key equals to the requested. You can also use map.keys() CONTAINS in place of it. E.g. `connections CONTAINSKEY 'Luke'`"),
|
||||
CONTAINS_VALUE("_containsValue", " CONTAINSVALUE ", 2, BaseTypeGroup.MAP , "For maps, the same as for CONTAINS, but checks on the map values. True if the map contains at least one value equals to the requested. You can also use map.values() CONTAINS in place of it. E.g. `connections containsValue 10:3`"),
|
||||
|
||||
|
||||
IS_DEFINED("_isDefined", " IS DEFINED ", 1, BaseTypeGroup.ANY, "Returns TRUE is a field is defined in a document"),
|
||||
IS_NOT_DEFINED("_isNotDefined", " IS NOT DEFINED ", 1, BaseTypeGroup.ANY, "Returns TRUE is a field is not defined in a document");
|
||||
|
||||
protected final String operatorKey;
|
||||
protected final int numberOfOperand;
|
||||
protected final String dbOperator;
|
||||
protected final BaseTypeGroup allowed;
|
||||
protected final String description;
|
||||
|
||||
private ComparisonOperator(String operatorKey, String dbOperator, int numberOfOperand, BaseTypeGroup allowed, String description) {
|
||||
this.operatorKey = operatorKey;
|
||||
this.dbOperator = dbOperator;
|
||||
this.numberOfOperand = numberOfOperand;
|
||||
this.allowed = allowed;
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
protected String getOperatorKey() {
|
||||
return operatorKey;
|
||||
}
|
||||
|
||||
public String getDbOperator() {
|
||||
return dbOperator;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
private static Set<String> operators;
|
||||
private static Map<String,ComparisonOperator> operatorByKey;
|
||||
|
||||
static {
|
||||
ComparisonOperator.operators = new HashSet<>();
|
||||
ComparisonOperator.operatorByKey = new HashMap<>();
|
||||
|
||||
for(ComparisonOperator queryComparisonOperator : ComparisonOperator.values()) {
|
||||
ComparisonOperator.operators.add(queryComparisonOperator.getOperatorKey());
|
||||
ComparisonOperator.operatorByKey.put(queryComparisonOperator.getOperatorKey(), queryComparisonOperator);
|
||||
}
|
||||
}
|
||||
|
||||
public static Set<String> getOperators() {
|
||||
return ComparisonOperator.operators;
|
||||
}
|
||||
|
||||
public static ComparisonOperator getOperator(String key) {
|
||||
return operatorByKey.get(key);
|
||||
}
|
||||
|
||||
public StringBuffer addCondition(String... operands) {
|
||||
StringBuffer stringBuffer = new StringBuffer();
|
||||
stringBuffer.append(operands[0]);
|
||||
stringBuffer.append(getDbOperator());
|
||||
stringBuffer.append(operands[1]);
|
||||
return stringBuffer;
|
||||
}
|
||||
|
||||
public static String getValue(JsonNode jsonNode) {
|
||||
StringBuffer stringBuffer = new StringBuffer();
|
||||
|
||||
String value = jsonNode.asText();
|
||||
if(jsonNode.isNumber()) {
|
||||
stringBuffer.append(value);
|
||||
} else {
|
||||
stringBuffer.append("\"");
|
||||
stringBuffer.append(value);
|
||||
stringBuffer.append("\"");
|
||||
}
|
||||
return stringBuffer.toString();
|
||||
}
|
||||
|
||||
public StringBuffer addCondition(JsonNode jn, String fieldNamePrefix) {
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,29 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.operators;
|
||||
|
||||
/**
 * SQL "methods", also known as field operators.
 *
 * A SQL method resembles a SQL function but is applied to a field/value
 * instead of to the query as a whole. In object-oriented terms it is a
 * "method" of the value. Compare:
 *
 *   function:  SELECT sum( salary ) FROM employee
 *   method:    SELECT salary.toJSON() FROM employee
 *
 * Methods may take parameters, like functions, and any number of them can
 * be chained in sequence.
 *
 * Placeholder enum: no field operator is modelled yet.
 *
 * @author Luca Frosini (ISTI - CNR)
 * See https://orientdb.com/docs/3.0.x/sql/SQL-Where.html#methods
 * http://orientdb.com/docs/3.0.x/sql/SQL-Methods.html
 */
public enum FieldOperator {

}
|
|
@ -0,0 +1,9 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.operators;
|
||||
|
||||
/**
 * Placeholder for OrientDB SQL functions (e.g. sum(), count()).
 * No function is modelled yet.
 *
 * @author Luca Frosini (ISTI - CNR)
 * See https://www.orientdb.com/docs/3.0.x/sql/SQL-Functions.html
 */
public class Function {

}
|
|
@ -0,0 +1,60 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.operators;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
 * Boolean operators accepted in JSON queries, mapping the JSON key
 * (e.g. {@code _and}) to the OrientDB SQL operator.
 *
 * @author Luca Frosini (ISTI - CNR)
 * See https://www.orientdb.com/docs/3.0.x/sql/SQL-Where.html
 */
public enum LogicalOperator {

	AND("_and", " AND ", "true if both the conditions are true"),
	OR("_or", " OR ", "true if at least one of the condition is true"),
	NOT("_not", " NOT ", "true if the condition is false.");

	// JSON key used in the query (e.g. "_and")
	protected final String operatorKey;
	// SQL rendering of the operator, including surrounding spaces
	protected final String dbOperator;
	// Human-readable description
	protected final String description;

	private LogicalOperator(String key, String sqlOperator, String doc) {
		this.operatorKey = key;
		this.dbOperator = sqlOperator;
		this.description = doc;
	}

	/** @return the JSON key of this operator */
	public String getOperatorKey() {
		return operatorKey;
	}

	/** @return the SQL rendering of this operator */
	public String getDbOperator() {
		return dbOperator;
	}

	/** @return a human-readable description of this operator */
	public String getDescription() {
		return description;
	}

	// Lookup structures, populated once for all constants.
	private static final Set<String> keys = new HashSet<>();
	private static final Map<String,LogicalOperator> byKey = new HashMap<>();

	static {
		for(LogicalOperator op : values()) {
			keys.add(op.operatorKey);
			byKey.put(op.operatorKey, op);
		}
	}

	/** @return the set of JSON keys of all logical operators */
	public static Set<String> getOperators() {
		return keys;
	}

	/**
	 * @param key the JSON key (e.g. "_and")
	 * @return the matching operator, or null when the key is unknown
	 */
	public static LogicalOperator getOperator(String key) {
		return byKey.get(key);
	}
}
|
|
@ -0,0 +1,180 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.operators;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.ws.rs.BadRequestException;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.node.TextNode;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*
|
||||
* OrientDB supports the eval() function to execute complex operations. Example:
|
||||
*
|
||||
* SELECT eval( "amount * 120 / 100 - discount" ) as finalPrice from Order
|
||||
|
||||
*
|
||||
* See https://www.orientdb.com/docs/3.0.x/sql/SQL-Where.html#mathematics-operators
|
||||
* https://www.orientdb.com/docs/3.0.x/sql/SQL-Syntax.html#math-operators
|
||||
* https://www.orientdb.com/docs/3.0.x/sql/SQL-Syntax.html#math-operators-precedence
|
||||
* https://www.orientdb.com/docs/3.0.x/sql/SQL-Syntax.html#array-concatenation
|
||||
*/
|
||||
public enum MatemathicsOperator {
|
||||
|
||||
SUM("_sum", "+", ""),
|
||||
MINUS("_minus", "-", ""),
|
||||
MULTIPLY("_multiply", "*", ""),
|
||||
DIVIDE("_divide", "/", ""),
|
||||
MODULE("_mod", "%", ""),
|
||||
BITWISE_RIGHT_SHIFT("_bitrshift", ">>", ""),
|
||||
BITWISE_LEFT_SHIFT("_bitlshift", "<<", ""),
|
||||
BITWISE_AND("_bitand", "&", ""),
|
||||
BITWISE_OR("_bitor", "|", ""),
|
||||
BITWISE_XOR("_bitxor", "^", ""),
|
||||
ARRAY_CONCATENATION("_arrayconcat", "||", "");
|
||||
|
||||
protected static Logger logger = LoggerFactory.getLogger(MatemathicsOperator.class);
|
||||
|
||||
public static final String VALUES_KEY = "values";
|
||||
public static final String SEPARATOR_KEY = "separator";
|
||||
public static final String AS_KEY = "as";
|
||||
|
||||
protected final String operatorKey;
|
||||
protected final String dbOperator;
|
||||
protected final String description;
|
||||
|
||||
private MatemathicsOperator(String operatorKey, String dbOperator, String description) {
|
||||
this.operatorKey = operatorKey;
|
||||
this.dbOperator = dbOperator;
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
protected String getOperatorKey() {
|
||||
return operatorKey;
|
||||
}
|
||||
|
||||
protected String getDbOperator() {
|
||||
return dbOperator;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
private static Set<String> operators;
|
||||
private static Map<String,MatemathicsOperator> operatorByKey;
|
||||
|
||||
static {
|
||||
MatemathicsOperator.operators = new HashSet<>();
|
||||
MatemathicsOperator.operatorByKey = new HashMap<>();
|
||||
|
||||
for(MatemathicsOperator matemathicsOperator : MatemathicsOperator.values()) {
|
||||
MatemathicsOperator.operators.add(matemathicsOperator.getOperatorKey());
|
||||
MatemathicsOperator.operatorByKey.put(matemathicsOperator.getOperatorKey(), matemathicsOperator);
|
||||
}
|
||||
}
|
||||
|
||||
public static Set<String> getOperators() {
|
||||
return operators;
|
||||
}
|
||||
|
||||
public static MatemathicsOperator getOperator(String key) {
|
||||
return operatorByKey.get(key);
|
||||
}
|
||||
|
||||
protected String getError(String key, Class<?> clazz, boolean mandatory) {
|
||||
StringBuffer error = new StringBuffer();
|
||||
error.append("Root emitting MatemathicsOperator (i.e. ");
|
||||
error.append(this.operatorKey);
|
||||
error.append(")");
|
||||
error.append(" contains ");
|
||||
if(mandatory) {
|
||||
error.append("mandatory ");
|
||||
}
|
||||
error.append(key);
|
||||
error.append(" which must be a");
|
||||
error.append(clazz.getSimpleName());
|
||||
error.append(". This is a client error. Please fix your query");
|
||||
logger.error(error.toString());
|
||||
throw new BadRequestException(error.toString());
|
||||
}
|
||||
|
||||
public String generateFieldToEmit(JsonNode jsonNode, String fieldPrefix) {
|
||||
JsonNode jn = jsonNode.get(VALUES_KEY);
|
||||
if(jn.isNull() || !jn.isArray()) {
|
||||
getError(VALUES_KEY, ArrayNode.class, true);
|
||||
}
|
||||
String fieldSeparator = null;
|
||||
if(this == MatemathicsOperator.SUM && jsonNode.has(SEPARATOR_KEY)) {
|
||||
JsonNode sep = jsonNode.get(SEPARATOR_KEY);
|
||||
if(!sep.isTextual()) {
|
||||
getError(SEPARATOR_KEY, TextNode.class, false);
|
||||
}
|
||||
fieldSeparator = sep.asText();
|
||||
}
|
||||
StringBuffer sb = new StringBuffer();
|
||||
sb.append("(");
|
||||
sb.append(generateFieldToEmit((ArrayNode) jn, fieldSeparator, fieldPrefix));
|
||||
JsonNode jnAs = jsonNode.get(AS_KEY);
|
||||
if(jnAs.isNull() || !jnAs.isTextual()) {
|
||||
getError(AS_KEY, TextNode.class, true);
|
||||
}
|
||||
sb.append(") AS `");
|
||||
sb.append(jnAs.asText());
|
||||
sb.append("`");
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
protected StringBuffer addFieldPrefix(StringBuffer sb, String fieldPrefix) {
|
||||
if(fieldPrefix !=null && fieldPrefix.trim().length()>0) {
|
||||
sb.append(fieldPrefix.trim());
|
||||
sb.append(".");
|
||||
}
|
||||
return sb;
|
||||
}
|
||||
|
||||
protected StringBuffer generateFieldToEmit(ArrayNode arrayNode, String fieldsSeparator, String fieldPrefix) {
|
||||
StringBuffer buffer = new StringBuffer();
|
||||
int size = arrayNode.size();
|
||||
for(int i=0; i<size; i++) {
|
||||
JsonNode jn = arrayNode.get(i);
|
||||
if(jn.isObject()) {
|
||||
ObjectNode on = (ObjectNode) jn;
|
||||
String key = on.fieldNames().next();
|
||||
MatemathicsOperator mo = MatemathicsOperator.getOperator(key);
|
||||
ArrayNode an = (ArrayNode) on.get(key);
|
||||
buffer.append("(");
|
||||
buffer.append(mo.generateFieldToEmit(an, null, fieldPrefix));
|
||||
buffer.append(")");
|
||||
}
|
||||
|
||||
if(jn.isTextual()) {
|
||||
buffer = addFieldPrefix(buffer, fieldPrefix);
|
||||
buffer.append(jn.asText());
|
||||
}
|
||||
|
||||
if(i<(size-1)) {
|
||||
buffer.append(" ");
|
||||
buffer.append(dbOperator);
|
||||
buffer.append(" ");
|
||||
if(fieldsSeparator!=null) {
|
||||
buffer.append("'");
|
||||
buffer.append(fieldsSeparator);
|
||||
buffer.append("' ");
|
||||
buffer.append(dbOperator);
|
||||
buffer.append(" ");
|
||||
}
|
||||
}
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.operators;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
 * Projection operators accepted in JSON queries (fields to be emitted in
 * the result).
 *
 * @author Luca Frosini (ISTI - CNR)
 * See https://www.orientdb.com/docs/3.0.x/sql/SQL-Where.html
 */
public enum ProjectionOperator {

	EMIT("_emit", "");

	// JSON key used in the query (e.g. "_emit")
	protected final String operator;
	// Human-readable description (currently empty)
	protected final String description;

	private ProjectionOperator(String key, String doc) {
		this.operator = key;
		this.description = doc;
	}

	/** @return the JSON key of this operator */
	public String getOperator() {
		return operator;
	}

	/** @return a human-readable description of this operator */
	public String getDescription() {
		return description;
	}

	// Lookup structures, populated once for all constants.
	private static final Set<String> keys = new HashSet<>();
	private static final Map<String,ProjectionOperator> byKey = new HashMap<>();

	static {
		for(ProjectionOperator po : values()) {
			keys.add(po.operator);
			byKey.put(po.operator, po);
		}
	}

	/** @return the set of JSON keys of all projection operators */
	public static Set<String> getOperators() {
		return keys;
	}

	/**
	 * @param key the JSON key (e.g. "_emit")
	 * @return the matching operator, or null when the key is unknown
	 */
	public static ProjectionOperator getQueryLogicalOperator(String key) {
		return byKey.get(key);
	}

}
|
|
@ -1,64 +0,0 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.operators;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
 * Comparison operators supported by the JSON query language, together with
 * their SQL rendering and a human-readable description.
 * @author Luca Frosini (ISTI - CNR)
 */
public enum QueryConditionalOperator {

	EQ("$eq", " = ", "Matches values that are equal to a specified value."),
	GT("$gt", " > ", "Matches values that are greater than a specified value."),
	GTE("$gte", " >= ", "Matches values that are greater than or equal to a specified value."),
	LT("$lt", " < ", "Matches values that are less than a specified value."),
	LTE("$lte", " <= ", "Matches values that are less than or equal to a specified value."),
	NE("$ne", " <> ", "Matches all values that are not equal to a specified value."),

	IN("$in", " IN ", "Matches any of the values specified in an array.");

	/** JSON key used in the query (e.g. "$eq"). */
	protected final String operator;
	/** SQL fragment this operator renders to, including surrounding spaces. */
	protected final String conditionalOperator;
	/** Human-readable description of the operator. */
	protected final String description;

	private QueryConditionalOperator(String operator, String conditionalOperator, String description) {
		this.operator = operator;
		this.conditionalOperator = conditionalOperator;
		this.description = description;
	}

	/** @return the JSON key of this operator */
	public String getOperator() {
		return operator;
	}

	/** @return the SQL fragment this operator renders to */
	public String getConditionalOperator() {
		return conditionalOperator;
	}

	/** @return a human-readable description of this operator */
	public String getDescription() {
		return description;
	}

	/** All known operator keys, populated once at class initialization. */
	private static final Set<String> knownKeys = new HashSet<>();
	/** Reverse lookup from operator key to enum constant. */
	private static final Map<String,QueryConditionalOperator> byKey = new HashMap<>();

	static {
		for(QueryConditionalOperator qco : values()) {
			knownKeys.add(qco.operator);
			byKey.put(qco.operator, qco);
		}
	}

	/** @return the set of all known operator keys */
	public static Set<String> getOperators() {
		return knownKeys;
	}

	/**
	 * Looks up the operator for the given JSON key.
	 * @param key the operator key (e.g. "$eq")
	 * @return the matching operator, or null when the key is unknown
	 */
	public static QueryConditionalOperator getQueryComparisonOperator(String key) {
		return byKey.get(key);
	}
}
|
|
@ -1,59 +0,0 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.operators;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
 * Logical operators supported by the JSON query language, together with
 * their SQL rendering and a human-readable description.
 * @author Luca Frosini (ISTI - CNR)
 */
public enum QueryLogicalOperator {

	AND("$and", " AND ", "true if both the conditions are true"),
	OR("$or", " OR ", "true if at least one of the condition is true"),
	NOT("$not", " NOT ", "true if the condition is false.");

	/** JSON key used in the query (e.g. "$and"). */
	protected final String operator;
	/** SQL fragment this operator renders to, including surrounding spaces. */
	protected final String logicalOperator;
	/** Human-readable description of the operator. */
	protected final String description;

	private QueryLogicalOperator(String operator, String logicalOperator, String description) {
		this.operator = operator;
		this.logicalOperator = logicalOperator;
		this.description = description;
	}

	/** @return the JSON key of this operator */
	public String getOperator() {
		return operator;
	}

	/** @return the SQL fragment this operator renders to */
	public String getLogicalOperator() {
		return logicalOperator;
	}

	/** @return a human-readable description of this operator */
	public String getDescription() {
		return description;
	}

	/** All known operator keys, populated once at class initialization. */
	private static final Set<String> knownKeys = new HashSet<>();
	/** Reverse lookup from operator key to enum constant. */
	private static final Map<String,QueryLogicalOperator> byKey = new HashMap<>();

	static {
		for(QueryLogicalOperator qlo : values()) {
			knownKeys.add(qlo.operator);
			byKey.put(qlo.operator, qlo);
		}
	}

	/** @return the set of all known operator keys */
	public static Set<String> getOperators() {
		return knownKeys;
	}

	/**
	 * Looks up the operator for the given JSON key.
	 * @param key the operator key (e.g. "$and")
	 * @return the matching operator, or null when the key is unknown
	 */
	public static QueryLogicalOperator getQueryLogicalOperator(String key) {
		return byKey.get(key);
	}
}
|
|
@ -3,8 +3,6 @@ package org.gcube.informationsystem.resourceregistry.queries.templates;
|
|||
import java.util.HashMap;
|
||||
import java.util.UUID;
|
||||
|
||||
import javax.ws.rs.BadRequestException;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
@ -30,6 +28,8 @@ import org.gcube.informationsystem.resourceregistry.instances.base.ElementManage
|
|||
import org.gcube.informationsystem.resourceregistry.instances.base.entities.EntityElementManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.model.Operation;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.JsonQuery;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.RequestUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.requests.ServerRequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.OrientDBUtility;
|
||||
import org.gcube.informationsystem.serialization.ElementMapper;
|
||||
import org.gcube.informationsystem.types.reference.entities.EntityType;
|
||||
|
@ -348,13 +348,28 @@ public class QueryTemplateManagement extends EntityElementManagement<QueryTempla
|
|||
public String reallyGetAll(boolean polymorphic) throws ResourceRegistryException {
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
ArrayNode arrayNode = objectMapper.createArrayNode();
|
||||
|
||||
ServerRequestInfo requestInfo = RequestUtility.getRequestInfo().get();
|
||||
int limit = requestInfo.getLimit();
|
||||
int offset = requestInfo.getOffset();
|
||||
|
||||
int position = -1;
|
||||
int count = 0;
|
||||
|
||||
Iterable<ODocument> iterable = oDatabaseDocument.browseClass(typeName, polymorphic);
|
||||
for (ODocument vertex : iterable) {
|
||||
if(++position < offset) {
|
||||
continue;
|
||||
}
|
||||
|
||||
QueryTemplateManagement queryTemplateManagement = new QueryTemplateManagement();
|
||||
queryTemplateManagement.setElement((OVertex) vertex);
|
||||
try {
|
||||
JsonNode jsonObject = queryTemplateManagement.serializeAsJsonNode();
|
||||
arrayNode.add(jsonObject);
|
||||
if(limit > 0 && ++count >= limit) {
|
||||
break;
|
||||
}
|
||||
} catch (ResourceRegistryException e) {
|
||||
logger.error("Unable to correctly serialize {}. It will be excluded from results. {}",
|
||||
vertex.toString(), OrientDBUtility.SHOULD_NOT_OCCUR_ERROR_MESSAGE);
|
||||
|
|
|
@ -4,8 +4,7 @@ import java.util.List;
|
|||
|
||||
import javax.ws.rs.core.UriInfo;
|
||||
|
||||
import org.gcube.informationsystem.base.reference.IdentifiableElement;
|
||||
import org.gcube.informationsystem.model.reference.properties.Metadata;
|
||||
import org.gcube.informationsystem.resourceregistry.api.request.BaseRequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.api.request.RequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.api.rest.InstancePath;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -14,39 +13,19 @@ import org.slf4j.LoggerFactory;
|
|||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*/
|
||||
public class ServerRequestInfo implements RequestInfo {
|
||||
public class ServerRequestInfo extends BaseRequestInfo implements RequestInfo {
|
||||
|
||||
protected static Logger logger = LoggerFactory.getLogger(ServerRequestInfo.class);
|
||||
|
||||
protected UriInfo uriInfo;
|
||||
|
||||
/**
|
||||
* Track if the request requested to include {@link Metadata}
|
||||
*/
|
||||
protected boolean includeMeta;
|
||||
|
||||
/**
|
||||
* Track if the request requested to include {@link Metadata} in all
|
||||
* {@link IdentifiableElement} or just in the root instance
|
||||
*/
|
||||
protected boolean allMeta;
|
||||
|
||||
|
||||
/**
|
||||
* Track if hierarchicalMode has been requested
|
||||
*/
|
||||
protected boolean hierarchicalMode;
|
||||
|
||||
/**
|
||||
* Track if the request requested to include contexts
|
||||
*/
|
||||
protected boolean includeContexts;
|
||||
|
||||
public ServerRequestInfo() {
|
||||
this.includeMeta = false;
|
||||
this.allMeta = false;
|
||||
this.hierarchicalMode = false;
|
||||
this.includeContexts = false;
|
||||
super();
|
||||
this.uriInfo = null;
|
||||
}
|
||||
|
||||
public ServerRequestInfo(int offset, int limit) {
|
||||
super(offset, limit);
|
||||
this.uriInfo = null;
|
||||
}
|
||||
|
||||
|
@ -77,6 +56,14 @@ public class ServerRequestInfo implements RequestInfo {
|
|||
// TODO check is the user has the role to request such parameter
|
||||
return true;
|
||||
|
||||
case InstancePath.LIMIT_QUERY_PARAMETER:
|
||||
// TODO check is the user has the role to request such parameter
|
||||
return true;
|
||||
|
||||
case InstancePath.OFFSET_QUERY_PARAMETER:
|
||||
// TODO check is the user has the role to request such parameter
|
||||
return true;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
@ -88,27 +75,35 @@ public class ServerRequestInfo implements RequestInfo {
|
|||
* Set the parameter if the user is allowed otherwise the default is maintained
|
||||
* @param queryParameterKey requested query parameter
|
||||
* @param bool the value to set
|
||||
* @return the value of variable corresponding the request parameter independetly if
|
||||
* the value has been set.
|
||||
* @param forceAllowed force the value and skip the isAllowed check
|
||||
* @return the value of variable corresponding the request parameter
|
||||
* independently if the value has been set.
|
||||
*/
|
||||
public boolean setIfAllowed(String queryParameterKey, boolean bool) {
|
||||
boolean toBeSet = bool && isAllowed(queryParameterKey);
|
||||
public boolean setIfAllowed(String queryParameterKey, boolean bool, boolean forceAllowed) {
|
||||
|
||||
switch (queryParameterKey) {
|
||||
case InstancePath.INCLUDE_META_QUERY_PARAMETER:
|
||||
includeMeta = toBeSet;
|
||||
if(forceAllowed || isAllowed(queryParameterKey)) {
|
||||
includeMeta = bool;
|
||||
}
|
||||
return includeMeta;
|
||||
|
||||
case InstancePath.INCLUDE_META_IN_ALL_INSTANCES_QUERY_PARAMETER:
|
||||
allMeta = toBeSet;
|
||||
if(forceAllowed || isAllowed(queryParameterKey)) {
|
||||
allMeta = bool;
|
||||
}
|
||||
return allMeta;
|
||||
|
||||
case InstancePath.INCLUDE_CONTEXTS_QUERY_PARAMETER:
|
||||
includeContexts = toBeSet;
|
||||
if(forceAllowed || isAllowed(queryParameterKey)) {
|
||||
includeContexts = bool;
|
||||
}
|
||||
return includeContexts;
|
||||
|
||||
case InstancePath.HIERARCHICAL_MODE_QUERY_PARAMETER:
|
||||
hierarchicalMode = toBeSet;
|
||||
if(forceAllowed || isAllowed(queryParameterKey)) {
|
||||
hierarchicalMode = bool;
|
||||
}
|
||||
return hierarchicalMode;
|
||||
|
||||
default:
|
||||
|
@ -118,7 +113,59 @@ public class ServerRequestInfo implements RequestInfo {
|
|||
return false;
|
||||
}
|
||||
|
||||
public void checkQueryParameter(String queryParameterKey) {
|
||||
/**
|
||||
* Set the parameter if the user is allowed otherwise the default is maintained
|
||||
* @param queryParameterKey requested query parameter
|
||||
* @param bool the value to set
|
||||
* @return the value of variable corresponding the request parameter
|
||||
* independently if the value has been set.
|
||||
*/
|
||||
public boolean setIfAllowed(String queryParameterKey, boolean bool) {
|
||||
return setIfAllowed(queryParameterKey, bool, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the parameter if the user is allowed otherwise the default is maintained
|
||||
* @param queryParameterKey requested query parameter
|
||||
* @param integer the int value to set
|
||||
* @param forceAllowed force the value and skip the isAllowed check
|
||||
* @return the value of variable corresponding the request parameter independently if
|
||||
* the value has been set.
|
||||
*/
|
||||
public int setIfAllowed(String queryParameterKey, int integer, boolean forceAllowed) {
|
||||
|
||||
switch (queryParameterKey) {
|
||||
case InstancePath.LIMIT_QUERY_PARAMETER:
|
||||
if(forceAllowed || isAllowed(queryParameterKey)) {
|
||||
limit = integer;
|
||||
}
|
||||
return limit;
|
||||
|
||||
case InstancePath.OFFSET_QUERY_PARAMETER:
|
||||
if(forceAllowed || isAllowed(queryParameterKey)) {
|
||||
offset = integer;
|
||||
}
|
||||
return offset;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the parameter if the user is allowed otherwise the default is maintained
|
||||
* @param queryParameterKey requested query parameter
|
||||
* @param integer the int value to set
|
||||
* @return the value of variable corresponding the request parameter independently if
|
||||
* the value has been set.
|
||||
*/
|
||||
public int setIfAllowed(String queryParameterKey, int integer) {
|
||||
return setIfAllowed(queryParameterKey, integer, false);
|
||||
}
|
||||
|
||||
public void checkBooleanQueryParameter(String queryParameterKey) {
|
||||
try {
|
||||
List<String> queryParameterList = uriInfo.getQueryParameters().get(queryParameterKey);
|
||||
if(queryParameterList!=null && queryParameterList.size()>0) {
|
||||
|
@ -127,7 +174,26 @@ public class ServerRequestInfo implements RequestInfo {
|
|||
setIfAllowed(queryParameterKey, bool);
|
||||
}
|
||||
}catch (Throwable t) {
|
||||
logger.warn("Unable to properly set the Hierarchical Mode is set", t.getMessage());
|
||||
logger.warn("Unable to properly set " + queryParameterKey, t.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void checkIntegerQueryParameter(String queryParameterKey) {
|
||||
checkIntegerQueryParameter(queryParameterKey, null);
|
||||
}
|
||||
|
||||
public void checkIntegerQueryParameter(String queryParameterKey, Integer defaultValue) {
|
||||
try {
|
||||
List<String> queryParameterList = uriInfo.getQueryParameters().get(queryParameterKey);
|
||||
if(queryParameterList!=null && queryParameterList.size()>0) {
|
||||
String intString = queryParameterList.get(0);
|
||||
int integer = Integer.valueOf(intString);
|
||||
setIfAllowed(queryParameterKey, integer);
|
||||
}else if(defaultValue!=null) {
|
||||
setIfAllowed(queryParameterKey, defaultValue, true);
|
||||
}
|
||||
}catch (Throwable t) {
|
||||
logger.warn("Unable to properly set " + queryParameterKey, t.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -135,59 +201,28 @@ public class ServerRequestInfo implements RequestInfo {
|
|||
this.uriInfo = uriInfo;
|
||||
}
|
||||
|
||||
public void checkAllQueryParameters() {
|
||||
checkIncludeQueryParameters();
|
||||
checkQueryParameter(InstancePath.HIERARCHICAL_MODE_QUERY_PARAMETER);
|
||||
public void checkAllBooleanQueryParameters() {
|
||||
checkAllIncludeQueryParameters();
|
||||
checkBooleanQueryParameter(InstancePath.HIERARCHICAL_MODE_QUERY_PARAMETER);
|
||||
}
|
||||
|
||||
public void checkIncludeQueryParameters() {
|
||||
public void checkAllIncludeQueryParameters() {
|
||||
checkIncludeAllMetaQueryParameters();
|
||||
checkQueryParameter(InstancePath.INCLUDE_CONTEXTS_QUERY_PARAMETER);
|
||||
checkBooleanQueryParameter(InstancePath.INCLUDE_CONTEXTS_QUERY_PARAMETER);
|
||||
}
|
||||
|
||||
public void checkIncludeAllMetaQueryParameters() {
|
||||
checkQueryParameter(InstancePath.INCLUDE_META_QUERY_PARAMETER);
|
||||
checkQueryParameter(InstancePath.INCLUDE_META_IN_ALL_INSTANCES_QUERY_PARAMETER);
|
||||
checkBooleanQueryParameter(InstancePath.INCLUDE_META_QUERY_PARAMETER);
|
||||
checkBooleanQueryParameter(InstancePath.INCLUDE_META_IN_ALL_INSTANCES_QUERY_PARAMETER);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean includeMeta() {
|
||||
return includeMeta;
|
||||
public void checkLimitOffset(int offset, int limit) {
|
||||
checkIntegerQueryParameter(InstancePath.OFFSET_QUERY_PARAMETER, offset);
|
||||
checkIntegerQueryParameter(InstancePath.LIMIT_QUERY_PARAMETER, limit);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setIncludeMeta(boolean includeMeta) {
|
||||
this.includeMeta = includeMeta;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean allMeta() {
|
||||
return allMeta;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setAllMeta(boolean allMeta) {
|
||||
this.allMeta = allMeta;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isHierarchicalMode() {
|
||||
return hierarchicalMode;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setHierarchicalMode(boolean hierarchicalMode) {
|
||||
this.hierarchicalMode = hierarchicalMode;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean includeContexts() {
|
||||
return includeContexts;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setIncludeContexts(boolean includeContexts) {
|
||||
this.includeContexts = includeContexts;
|
||||
public void checkLimitOffset() {
|
||||
checkLimitOffset(DEFAULT_OFFSET, DEFAULT_LIMIT);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegis
|
|||
import org.gcube.informationsystem.resourceregistry.api.exceptions.contexts.ContextNotFoundException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.queries.InvalidQueryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaNotFoundException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.request.BaseRequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.api.rest.AccessPath;
|
||||
import org.gcube.informationsystem.resourceregistry.api.rest.ContextPath;
|
||||
import org.gcube.informationsystem.resourceregistry.api.rest.InstancePath;
|
||||
|
@ -79,9 +80,10 @@ public class Access extends BaseRest {
|
|||
logger.info("Requested to read all {}s", org.gcube.informationsystem.contexts.reference.entities.Context.NAME);
|
||||
setAccountingMethod(Method.LIST, org.gcube.informationsystem.contexts.reference.entities.Context.NAME);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo(BaseRequestInfo.DEFAULT_OFFSET, BaseRequestInfo.UNBOUNDED_LIMIT);
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkLimitOffset();
|
||||
|
||||
ContextManagement contextManagement = new ContextManagement();
|
||||
return contextManagement.all(false);
|
||||
|
@ -104,7 +106,7 @@ public class Access extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
|
||||
ContextManagement contextManagement = new ContextManagement();
|
||||
contextManagement.setUUID(UUID.fromString(uuid));
|
||||
|
@ -127,7 +129,7 @@ public class Access extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(TypePath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(TypePath.INCLUDE_META_QUERY_PARAMETER);
|
||||
|
||||
TypeManagement typeManagement = new TypeManagement();
|
||||
typeManagement.setTypeName(type);
|
||||
|
@ -152,7 +154,8 @@ public class Access extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkLimitOffset();
|
||||
|
||||
QueryTemplateManagement queryTemplateManagement = new QueryTemplateManagement();
|
||||
return queryTemplateManagement.all(false);
|
||||
|
@ -172,7 +175,7 @@ public class Access extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
|
||||
QueryTemplateManagement queryTemplateManagement = new QueryTemplateManagement();
|
||||
queryTemplateManagement.setName(queryTemplateName);
|
||||
|
@ -195,7 +198,7 @@ public class Access extends BaseRest {
|
|||
setAccountingMethod(Method.RUN, QueryTemplate.NAME);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkAllQueryParameters();
|
||||
serverRequestInfo.checkAllBooleanQueryParameters();
|
||||
|
||||
QueryTemplateManagement queryTemplateManagement = new QueryTemplateManagement();
|
||||
queryTemplateManagement.setName(queryTemplateName);
|
||||
|
@ -219,7 +222,8 @@ public class Access extends BaseRest {
|
|||
setAccountingMethod(Method.LIST, InstancesManager.INSTANCE);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkAllQueryParameters();
|
||||
serverRequestInfo.checkAllBooleanQueryParameters();
|
||||
serverRequestInfo.checkLimitOffset();
|
||||
|
||||
ElementManagement<?,?> erManagement = ElementManagementUtility.getERManagement(type);
|
||||
return erManagement.all(polymorphic);
|
||||
|
@ -239,7 +243,7 @@ public class Access extends BaseRest {
|
|||
setAccountingMethod(Method.EXIST, InstancesManager.INSTANCE);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkQueryParameter(InstancePath.HIERARCHICAL_MODE_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(InstancePath.HIERARCHICAL_MODE_QUERY_PARAMETER);
|
||||
|
||||
ElementManagement<?,?> erManagement = ElementManagementUtility.getERManagement(type);
|
||||
|
||||
|
@ -276,7 +280,7 @@ public class Access extends BaseRest {
|
|||
setAccountingMethod(Method.READ, InstancesManager.INSTANCE);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkAllQueryParameters();
|
||||
serverRequestInfo.checkAllBooleanQueryParameters();
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
ElementManagement erManagement = ElementManagementUtility.getERManagement(type);
|
||||
|
@ -317,6 +321,9 @@ public class Access extends BaseRest {
|
|||
*
|
||||
* e.g. GET /access/query?q=SELECT FROM V
|
||||
*
|
||||
* It is the responsibility of the client to manage paginated results
* according to the SQL syntax (see the SKIP and LIMIT parameters in the documentation indicated above)
|
||||
*
|
||||
* @param query Defines the query to send to the backend.
|
||||
* @param raw request a raw response (not a Element based response)
|
||||
* @return The JSON representation of the result
|
||||
|
@ -338,9 +345,9 @@ public class Access extends BaseRest {
|
|||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
if(raw) {
|
||||
// TODO Check if the role allow to request raw data
|
||||
serverRequestInfo.checkQueryParameter(InstancePath.HIERARCHICAL_MODE_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(InstancePath.HIERARCHICAL_MODE_QUERY_PARAMETER);
|
||||
}else {
|
||||
serverRequestInfo.checkAllQueryParameters();
|
||||
serverRequestInfo.checkAllBooleanQueryParameters();
|
||||
}
|
||||
|
||||
Query queryManager = new QueryImpl();
|
||||
|
@ -395,7 +402,8 @@ public class Access extends BaseRest {
|
|||
setAccountingMethod(Method.QUERY, JSON_QUERY_METHOD);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkAllQueryParameters();
|
||||
serverRequestInfo.checkAllBooleanQueryParameters();
|
||||
serverRequestInfo.checkLimitOffset();
|
||||
|
||||
JsonQuery jsonQueryManager = new JsonQuery();
|
||||
jsonQueryManager.setJsonQuery(jsonQuery);
|
||||
|
@ -418,12 +426,12 @@ public class Access extends BaseRest {
|
|||
* All the Resources with a ContactFacet :
|
||||
* /access/query/Resource/ConsistsOf/ContactFacet?_polymorphic=true&_direction=out
|
||||
*
|
||||
* All the Eservice having an incoming (IN) Hosts relation with an HostingNode (i.e. all smartgears services)
|
||||
* All the EService having an incoming (IN) Hosts relation with an HostingNode (i.e. all smartgears services)
|
||||
* GET /access/query/EService/Hosts/HostingNode?_polymorphic=true&_direction=in
|
||||
*
|
||||
* All the Eservice having an incoming (IN) Hosts relation (i.e. hosted by) the HostingNode with UUID
|
||||
* All the EService having an incoming (IN) Hosts relation (i.e. hosted by) the HostingNode with UUID
|
||||
* 16032d09-3823-444e-a1ff-a67de4f350a
|
||||
* * GET /access/query/EService/hosts/HostingNode?_reference=16032d09-3823-444e-a1ff-a67de4f350a8&_polymorphic=true&_direction=in
|
||||
* * GET /access/query/EService/Hosts/HostingNode?_reference=16032d09-3823-444e-a1ff-a67de4f350a8&_polymorphic=true&_direction=in
|
||||
*
|
||||
*/
|
||||
@SuppressWarnings({"rawtypes"})
|
||||
|
@ -446,7 +454,8 @@ public class Access extends BaseRest {
|
|||
setAccountingMethod(Method.QUERY, PREPARED_QUERY_METHOD);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkAllQueryParameters();
|
||||
serverRequestInfo.checkAllBooleanQueryParameters();
|
||||
serverRequestInfo.checkLimitOffset();
|
||||
|
||||
ElementManagement erManagement = ElementManagementUtility.getERManagement(resourcetype);
|
||||
|
||||
|
|
|
@ -35,15 +35,20 @@ public class BaseRest {
|
|||
setAccountingMethod(accountingMethod.toString());
|
||||
}
|
||||
|
||||
|
||||
|
||||
protected ServerRequestInfo initRequestInfo() {
|
||||
ServerRequestInfo requestInfo = new ServerRequestInfo();
|
||||
private ServerRequestInfo initRequestInfo(ServerRequestInfo requestInfo) {
|
||||
requestInfo.setUriInfo(uriInfo);
|
||||
|
||||
RequestUtility.getRequestInfo().set(requestInfo);
|
||||
|
||||
return requestInfo;
|
||||
}
|
||||
|
||||
protected ServerRequestInfo initRequestInfo(int offset, int limit) {
|
||||
ServerRequestInfo requestInfo = new ServerRequestInfo(offset, limit);
|
||||
return initRequestInfo(requestInfo);
|
||||
}
|
||||
|
||||
protected ServerRequestInfo initRequestInfo() {
|
||||
ServerRequestInfo requestInfo = new ServerRequestInfo();
|
||||
return initRequestInfo(requestInfo);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@ import org.gcube.informationsystem.contexts.reference.entities.Context;
|
|||
import org.gcube.informationsystem.resourceregistry.ResourceInitializer;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegistryException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.contexts.ContextNotFoundException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.request.BaseRequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.api.rest.ContextPath;
|
||||
import org.gcube.informationsystem.resourceregistry.contexts.ContextUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.contexts.entities.ContextManagement;
|
||||
|
@ -46,9 +47,10 @@ public class ContextManager extends BaseRest {
|
|||
logger.info("Requested to read all {}s", Context.NAME);
|
||||
setAccountingMethod(Method.LIST, Context.NAME);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo(BaseRequestInfo.DEFAULT_OFFSET, BaseRequestInfo.UNBOUNDED_LIMIT);
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkLimitOffset();
|
||||
|
||||
ContextManagement contextManagement = new ContextManagement();
|
||||
return contextManagement.all(false);
|
||||
|
@ -73,7 +75,7 @@ public class ContextManager extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
|
||||
ContextManagement contextManagement = new ContextManagement();
|
||||
contextManagement.setUUID(UUID.fromString(uuid));
|
||||
|
@ -98,7 +100,7 @@ public class ContextManager extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
|
||||
ContextManagement contextManagement = new ContextManagement();
|
||||
contextManagement.setUUID(UUID.fromString(uuid));
|
||||
|
|
|
@ -59,7 +59,8 @@ public class InstancesManager extends BaseRest {
|
|||
setAccountingMethod(Method.LIST, InstancesManager.INSTANCE);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkAllQueryParameters();
|
||||
serverRequestInfo.checkAllBooleanQueryParameters();
|
||||
serverRequestInfo.checkLimitOffset();
|
||||
|
||||
ElementManagement<?,?> erManagement = ElementManagementUtility.getERManagement(type);
|
||||
return erManagement.all(polymorphic);
|
||||
|
@ -80,7 +81,7 @@ public class InstancesManager extends BaseRest {
|
|||
setAccountingMethod(Method.EXIST, InstancesManager.INSTANCE);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkQueryParameter(InstancePath.HIERARCHICAL_MODE_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(InstancePath.HIERARCHICAL_MODE_QUERY_PARAMETER);
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
ElementManagement erManagement = ElementManagementUtility.getERManagement(type);
|
||||
|
@ -118,7 +119,7 @@ public class InstancesManager extends BaseRest {
|
|||
setAccountingMethod(Method.READ, InstancesManager.INSTANCE);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkAllQueryParameters();
|
||||
serverRequestInfo.checkAllBooleanQueryParameters();
|
||||
|
||||
ElementManagement<?,?> erManagement = ElementManagementUtility.getERManagement(type);
|
||||
erManagement.setElementType(type);
|
||||
|
@ -144,7 +145,7 @@ public class InstancesManager extends BaseRest {
|
|||
setAccountingMethod(Method.UPDATE, InstancesManager.INSTANCE);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkIncludeQueryParameters();
|
||||
serverRequestInfo.checkAllIncludeQueryParameters();
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
ElementManagement erManagement = ElementManagementUtility.getERManagement(type);
|
||||
|
|
|
@ -46,7 +46,8 @@ public class QueryTemplateManager extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkLimitOffset();
|
||||
|
||||
QueryTemplateManagement queryTemplateManagement = new QueryTemplateManagement();
|
||||
return queryTemplateManagement.all(false);
|
||||
|
@ -93,7 +94,7 @@ public class QueryTemplateManager extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
|
||||
QueryTemplateManagement queryTemplateManagement = new QueryTemplateManagement();
|
||||
queryTemplateManagement.setName(queryTemplateName);
|
||||
|
@ -115,7 +116,7 @@ public class QueryTemplateManager extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(ContextPath.INCLUDE_META_QUERY_PARAMETER);
|
||||
|
||||
QueryTemplateManagement queryTemplateManagement = new QueryTemplateManagement();
|
||||
queryTemplateManagement.setName(queryTemplateName);
|
||||
|
@ -139,7 +140,7 @@ public class QueryTemplateManager extends BaseRest {
|
|||
setAccountingMethod(Method.RUN, QueryTemplate.NAME);
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkAllQueryParameters();
|
||||
serverRequestInfo.checkAllBooleanQueryParameters();
|
||||
|
||||
QueryTemplateManagement queryTemplateManagement = new QueryTemplateManagement();
|
||||
queryTemplateManagement.setName(queryTemplateName);
|
||||
|
|
|
@ -89,7 +89,7 @@ public class SharingManager extends BaseRest {
|
|||
throws SchemaViolationException, ResourceNotFoundException, ContextNotFoundException, ResourceRegistryException {
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkIncludeQueryParameters();
|
||||
serverRequestInfo.checkAllIncludeQueryParameters();
|
||||
|
||||
try {
|
||||
StringBuffer calledMethod = new StringBuffer();
|
||||
|
@ -171,7 +171,7 @@ public class SharingManager extends BaseRest {
|
|||
throws SchemaViolationException, ResourceNotFoundException, ContextNotFoundException, ResourceRegistryException {
|
||||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.checkIncludeQueryParameters();
|
||||
serverRequestInfo.checkAllIncludeQueryParameters();
|
||||
|
||||
StringBuffer calledMethod = new StringBuffer();
|
||||
if(dryRun==null) {
|
||||
|
|
|
@ -5,11 +5,13 @@ import java.util.List;
|
|||
import javax.ws.rs.Consumes;
|
||||
import javax.ws.rs.DefaultValue;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.InternalServerErrorException;
|
||||
import javax.ws.rs.PUT;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.Produces;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.WebApplicationException;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import javax.ws.rs.core.Response;
|
||||
import javax.ws.rs.core.Response.Status;
|
||||
|
@ -36,6 +38,23 @@ public class TypeManager extends BaseRest {
|
|||
super();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean the types cache
|
||||
* @return
|
||||
*/
|
||||
// @DELETE
|
||||
public Response cleanCache() {
|
||||
try {
|
||||
// TODO
|
||||
return Response.status(Status.NO_CONTENT).build();
|
||||
}catch (WebApplicationException e) {
|
||||
throw e;
|
||||
}catch (Exception e) {
|
||||
throw new InternalServerErrorException(e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* PUT /types/{TYPE_NAME}
|
||||
* e.g. PUT /types/ContactFacet
|
||||
|
@ -54,7 +73,7 @@ public class TypeManager extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(TypePath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(TypePath.INCLUDE_META_QUERY_PARAMETER);
|
||||
|
||||
TypeManagement schemaManagement = new TypeManagement();
|
||||
schemaManagement.setTypeName(typeName);
|
||||
|
@ -80,7 +99,7 @@ public class TypeManager extends BaseRest {
|
|||
|
||||
ServerRequestInfo serverRequestInfo = initRequestInfo();
|
||||
serverRequestInfo.setAllMeta(true);
|
||||
serverRequestInfo.checkQueryParameter(TypePath.INCLUDE_META_QUERY_PARAMETER);
|
||||
serverRequestInfo.checkBooleanQueryParameter(TypePath.INCLUDE_META_QUERY_PARAMETER);
|
||||
|
||||
TypeManagement schemaManagement = new TypeManagement();
|
||||
schemaManagement.setTypeName(type);
|
||||
|
|
|
@ -24,6 +24,7 @@ public class OrientDBTypeMapping {
|
|||
static {
|
||||
BASE_TYPE_TO_OTYPE = new HashMap<>();
|
||||
|
||||
BASE_TYPE_TO_OTYPE.put(BaseType.ANY, OType.ANY);
|
||||
BASE_TYPE_TO_OTYPE.put(BaseType.BOOLEAN, OType.BOOLEAN);
|
||||
BASE_TYPE_TO_OTYPE.put(BaseType.INTEGER, OType.INTEGER);
|
||||
BASE_TYPE_TO_OTYPE.put(BaseType.SHORT, OType.SHORT);
|
||||
|
|
|
@ -362,12 +362,12 @@ public class TypeManagement {
|
|||
*/
|
||||
if(!typeList.contains(type.getName())) {
|
||||
switch(propertyTypeName.getBaseType()) {
|
||||
case LIST:
|
||||
throw new UnsupportedDataTypeException(OType.EMBEDDEDLIST
|
||||
+ " support is currently disabled due to OrientDB bug see https://github.com/orientechnologies/orientdb/issues/7354");
|
||||
case SET:
|
||||
throw new UnsupportedDataTypeException(OType.EMBEDDEDSET
|
||||
+ " support is currently disabled due to OrientDB bug see https://github.com/orientechnologies/orientdb/issues/7354");
|
||||
// case LIST:
|
||||
// throw new UnsupportedDataTypeException(OType.EMBEDDEDLIST
|
||||
// + " support is currently disabled due to OrientDB bug see https://github.com/orientechnologies/orientdb/issues/7354");
|
||||
// case SET:
|
||||
// throw new UnsupportedDataTypeException(OType.EMBEDDEDSET
|
||||
// + " support is currently disabled due to OrientDB bug see https://github.com/orientechnologies/orientdb/issues/7354");
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
@ -798,7 +798,11 @@ public class TypeManagement {
|
|||
oSchema.dropClass(typeName);
|
||||
|
||||
ElementManagement<?,?> erManagement = getTypeManagement(accessType, typeName);
|
||||
erManagement.delete();
|
||||
// If we need to delete an internal type for maintenance we this this check
|
||||
// otherwise we have a null pointer exception
|
||||
if(erManagement!=null) {
|
||||
erManagement.delete();
|
||||
}
|
||||
|
||||
oDatabaseDocument.commit();
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ import com.orientechnologies.orient.core.sql.executor.OResultSet;
|
|||
/**
|
||||
* @author Luca Frosini (ISTI - CNR)
|
||||
*/
|
||||
public class PropertyTypeDefinitionManagement extends ElementManagement<OElement, PropertyType<?>> {
|
||||
public class PropertyTypeDefinitionManagement extends ElementManagement<OElement, PropertyType> {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(PropertyTypeDefinitionManagement.class);
|
||||
|
||||
|
|
|
@ -6,7 +6,6 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
|
||||
import org.gcube.common.encryption.encrypter.StringEncrypter;
|
||||
import org.gcube.informationsystem.base.reference.properties.PropertyElement;
|
||||
import org.gcube.informationsystem.model.reference.properties.Encrypted;
|
||||
import org.gcube.informationsystem.model.reference.properties.Property;
|
||||
import org.gcube.informationsystem.resourceregistry.dbinitialization.DatabaseEnvironment;
|
||||
|
@ -46,7 +45,7 @@ public class EncryptedOrient extends ODocument implements Encrypted {
|
|||
public List<String> getSupertypes() {
|
||||
TypesCache typesCache = TypesCache.getInstance();
|
||||
@SuppressWarnings("unchecked")
|
||||
CachedType<PropertyType<PropertyElement>> cachedType = (CachedType<PropertyType<PropertyElement>>) typesCache.getCachedType(getTypeName());
|
||||
CachedType<PropertyType> cachedType = (CachedType<PropertyType>) typesCache.getCachedType(getTypeName());
|
||||
try {
|
||||
return cachedType.getSuperTypes();
|
||||
} catch (Exception e) {
|
||||
|
|
|
@ -5,7 +5,6 @@ import java.util.Date;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.gcube.informationsystem.base.reference.properties.PropertyElement;
|
||||
import org.gcube.informationsystem.model.reference.properties.Metadata;
|
||||
import org.gcube.informationsystem.model.reference.properties.Property;
|
||||
import org.gcube.informationsystem.resourceregistry.types.CachedType;
|
||||
|
@ -37,7 +36,7 @@ public class MetadataOrient extends ODocument implements Metadata {
|
|||
public List<String> getSupertypes() {
|
||||
TypesCache typesCache = TypesCache.getInstance();
|
||||
@SuppressWarnings("unchecked")
|
||||
CachedType<PropertyType<PropertyElement>> cachedType = (CachedType<PropertyType<PropertyElement>>) typesCache.getCachedType(getTypeName());
|
||||
CachedType<PropertyType> cachedType = (CachedType<PropertyType>) typesCache.getCachedType(getTypeName());
|
||||
try {
|
||||
return cachedType.getSuperTypes();
|
||||
} catch (Exception e) {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
package org.gcube.informationsystem.resourceregistry.utils;
|
||||
package org.gcube.informationsystem.resourceregistry.utils;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.UUID;
|
||||
|
|
|
@ -4,7 +4,6 @@ import java.util.ArrayList;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.gcube.informationsystem.base.reference.properties.PropertyElement;
|
||||
import org.gcube.informationsystem.model.reference.properties.PropagationConstraint;
|
||||
import org.gcube.informationsystem.model.reference.properties.Property;
|
||||
import org.gcube.informationsystem.resourceregistry.types.CachedType;
|
||||
|
@ -36,7 +35,7 @@ public class PropagationConstraintOrient extends ODocument implements Propagatio
|
|||
public List<String> getSupertypes() {
|
||||
TypesCache typesCache = TypesCache.getInstance();
|
||||
@SuppressWarnings("unchecked")
|
||||
CachedType<PropertyType<PropertyElement>> cachedType = (CachedType<PropertyType<PropertyElement>>) typesCache.getCachedType(getTypeName());
|
||||
CachedType<PropertyType> cachedType = (CachedType<PropertyType>) typesCache.getCachedType(getTypeName());
|
||||
try {
|
||||
return cachedType.getSuperTypes();
|
||||
} catch (Exception e) {
|
||||
|
|
|
@ -47,6 +47,8 @@ public class ContextTest {
|
|||
public static final String PASSWORD_PROPERTY_KEY = "password";
|
||||
public static final String CLIENT_ID_PROPERTY_KEY = "clientId";
|
||||
|
||||
public static final String RESOURCE_REGISTRY_URL_PROPERTY = "RESOURCE_REGISTRY_URL";
|
||||
|
||||
static {
|
||||
GCUBE = "/gcube";
|
||||
DEVNEXT = GCUBE + "/devNext";
|
||||
|
@ -54,7 +56,7 @@ public class ContextTest {
|
|||
DEVSEC = GCUBE + "/devsec";
|
||||
DEVVRE = DEVSEC + "/devVRE";
|
||||
|
||||
PARENT_DEFAULT_TEST_SCOPE = "/gcube";
|
||||
PARENT_DEFAULT_TEST_SCOPE = GCUBE;
|
||||
DEFAULT_TEST_SCOPE = DEVNEXT;
|
||||
ALTERNATIVE_TEST_SCOPE = NEXTNEXT;
|
||||
|
||||
|
@ -63,7 +65,6 @@ public class ContextTest {
|
|||
try {
|
||||
// load the properties file
|
||||
properties.load(input);
|
||||
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
package org.gcube.informationsystem.resourceregistry.contexts;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.core.JsonProcessingException;
|
||||
|
@ -441,7 +443,13 @@ public class ContextManagementTest extends ContextTest {
|
|||
}
|
||||
|
||||
private List<Context> getAll() throws Exception {
|
||||
return getAll(0, -1);
|
||||
}
|
||||
|
||||
private List<Context> getAll(Integer forceOffset, Integer forcelimit) throws Exception {
|
||||
ContextManagement contextManagement = new ContextManagement();
|
||||
contextManagement.setForceOffset(forceOffset);
|
||||
contextManagement.setForceLimit(forcelimit);
|
||||
String allString = contextManagement.all(false);
|
||||
logger.trace(allString);
|
||||
List<Context> all = ElementMapper.unmarshalList(Context.class, allString);
|
||||
|
@ -494,11 +502,10 @@ public class ContextManagementTest extends ContextTest {
|
|||
delete(context);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testContextCache() throws Exception {
|
||||
List<Context> contexts = getAll();
|
||||
logger.debug("{}", contexts);
|
||||
logger.info("{}", contexts);
|
||||
|
||||
ServerContextCache contextCache = ServerContextCache.getInstance();
|
||||
Map<UUID, String> uuidToContextFullName = contextCache.getUUIDToContextFullNameAssociation();
|
||||
|
@ -537,4 +544,94 @@ public class ContextManagementTest extends ContextTest {
|
|||
logger.debug("{} - {} : {}", uuid, fullName, context);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLimitOffset() throws Exception {
|
||||
int limit = 2;
|
||||
int offset = limit * 0;
|
||||
|
||||
List<Context> contexts = getAll(offset, limit);
|
||||
|
||||
logger.info("Going to check {}s pagination validity", Context.NAME);
|
||||
|
||||
if(contexts.size()==0) {
|
||||
return;
|
||||
}
|
||||
Assert.assertTrue(contexts.size() <= limit);
|
||||
|
||||
if(contexts.size()< limit) {
|
||||
return;
|
||||
}
|
||||
|
||||
Set<UUID> uuids = new HashSet<>();
|
||||
for(Context context : contexts) {
|
||||
UUID uuid = context.getID();
|
||||
uuids.add(uuid);
|
||||
logger.info("Using getAll({}, {}) found {} with UUID {} and name {}", offset, limit, Context.NAME, uuid, context.getName());
|
||||
}
|
||||
|
||||
offset = limit * 1;
|
||||
contexts = getAll(offset, limit);
|
||||
|
||||
if(contexts.size()>0) {
|
||||
Assert.assertTrue(contexts.size() <= limit);
|
||||
|
||||
for(Context context : contexts) {
|
||||
UUID uuid = context.getID();
|
||||
Assert.assertFalse(uuids.contains(uuid));
|
||||
uuids.add(uuid);
|
||||
logger.info("Using getAll({}, {}) found {} with UUID {} and name {}", offset, limit, Context.NAME, uuid, context.getName());
|
||||
}
|
||||
|
||||
if(contexts.size()<limit) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
int doubleLimit = limit*2;
|
||||
offset = 0;
|
||||
|
||||
contexts = getAll(0, doubleLimit);
|
||||
|
||||
Assert.assertTrue(contexts.size() <= doubleLimit);
|
||||
|
||||
for(Context context : contexts) {
|
||||
UUID uuid = context.getID();
|
||||
logger.info("Using getAll({}, {}) found {} with UUID {} and name {}", offset, doubleLimit, Context.NAME, uuid, context.getName());
|
||||
Assert.assertTrue(uuids.contains(uuid));
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("Going to check all {}s", Context.NAME);
|
||||
|
||||
List<Context> all = getAll();
|
||||
|
||||
uuids = new HashSet<>();
|
||||
|
||||
int i = -1;
|
||||
|
||||
while(true) {
|
||||
offset = ++i * limit;
|
||||
contexts = getAll(offset, limit);
|
||||
for(Context context : contexts) {
|
||||
UUID uuid = context.getID();
|
||||
logger.info("Using getAll({}, {}) found {} with UUID {} and name {}", offset, limit, Context.NAME, uuid, context.getName());
|
||||
uuids.add(uuid);
|
||||
}
|
||||
if(contexts.size()<limit) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Assert.assertTrue(all.size()==uuids.size());
|
||||
|
||||
for(Context context : all) {
|
||||
UUID uuid = context.getID();
|
||||
Assert.assertTrue(uuids.contains(uuid));
|
||||
logger.info("Using getAll() found {} with UUID {} and name {}", Context.NAME, uuid, context.getName());
|
||||
|
||||
}
|
||||
|
||||
logger.info("{} pagination seems properly working", Context.NAME);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,8 +8,10 @@ import java.net.URL;
|
|||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.encryption.encrypter.StringEncrypter;
|
||||
|
@ -17,6 +19,7 @@ import org.gcube.informationsystem.base.reference.IdentifiableElement;
|
|||
import org.gcube.informationsystem.model.impl.properties.EncryptedImpl;
|
||||
import org.gcube.informationsystem.model.impl.properties.PropagationConstraintImpl;
|
||||
import org.gcube.informationsystem.model.impl.relations.ConsistsOfImpl;
|
||||
import org.gcube.informationsystem.model.reference.ERElement;
|
||||
import org.gcube.informationsystem.model.reference.ModelElement;
|
||||
import org.gcube.informationsystem.model.reference.entities.Facet;
|
||||
import org.gcube.informationsystem.model.reference.entities.Resource;
|
||||
|
@ -30,6 +33,8 @@ import org.gcube.informationsystem.model.reference.relations.ConsistsOf;
|
|||
import org.gcube.informationsystem.model.reference.relations.IsRelatedTo;
|
||||
import org.gcube.informationsystem.resourceregistry.ContextTest;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.ResourceRegistryException;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.base.ElementManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.base.ElementManagementUtility;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.model.entities.FacetManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.model.entities.ResourceManagement;
|
||||
import org.gcube.informationsystem.resourceregistry.instances.model.relations.ConsistsOfManagement;
|
||||
|
@ -38,6 +43,7 @@ import org.gcube.informationsystem.resourceregistry.requests.RequestUtility;
|
|||
import org.gcube.informationsystem.resourceregistry.requests.ServerRequestInfo;
|
||||
import org.gcube.informationsystem.resourceregistry.utils.MetadataUtility;
|
||||
import org.gcube.informationsystem.serialization.ElementMapper;
|
||||
import org.gcube.informationsystem.utils.TypeUtility;
|
||||
import org.gcube.resourcemanagement.model.impl.entities.facets.AccessPointFacetImpl;
|
||||
import org.gcube.resourcemanagement.model.impl.entities.facets.CPUFacetImpl;
|
||||
import org.gcube.resourcemanagement.model.impl.entities.facets.EventFacetImpl;
|
||||
|
@ -115,6 +121,8 @@ public class ERManagementTest extends ContextTest {
|
|||
ServerRequestInfo requestInfo = RequestUtility.getRequestInfo().get();
|
||||
requestInfo.setIncludeMeta(true);
|
||||
requestInfo.setAllMeta(true);
|
||||
requestInfo.setLimit(1000);
|
||||
requestInfo.setOffset(0);
|
||||
}
|
||||
|
||||
public static SoftwareFacet getSoftwareFacet() {
|
||||
|
@ -171,8 +179,8 @@ public class ERManagementTest extends ContextTest {
|
|||
|
||||
LicenseFacet licenseFacet = new LicenseFacetImpl();
|
||||
licenseFacet.setName("EUPL");
|
||||
licenseFacet.setTextURL(
|
||||
new URL("https://joinup.ec.europa.eu/community/etestAddToContextFromDifferentSourceContextupl/og_page/european-union-public-licence-eupl-v11"));
|
||||
licenseFacet.setTextURL(new URL(
|
||||
"https://joinup.ec.europa.eu/community/etestAddToContextFromDifferentSourceContextupl/og_page/european-union-public-licence-eupl-v11"));
|
||||
eService.addFacet(licenseFacet);
|
||||
|
||||
return eService;
|
||||
|
@ -226,66 +234,71 @@ public class ERManagementTest extends ContextTest {
|
|||
return hostingNode;
|
||||
}
|
||||
|
||||
public static void checkUUIDAndMetadata(IdentifiableElement identifiableElement, IdentifiableElement createdIdentifiableElement) {
|
||||
public static void checkUUIDAndMetadata(IdentifiableElement identifiableElement,
|
||||
IdentifiableElement createdIdentifiableElement) {
|
||||
UUID createdUUID = createdIdentifiableElement.getID();
|
||||
Assert.assertTrue(createdUUID!=null);
|
||||
Assert.assertTrue(createdUUID != null);
|
||||
|
||||
if(identifiableElement.getID()!=null) {
|
||||
Assert.assertTrue(createdUUID.compareTo(identifiableElement.getID())==0);
|
||||
if (identifiableElement.getID() != null) {
|
||||
Assert.assertTrue(createdUUID.compareTo(identifiableElement.getID()) == 0);
|
||||
}
|
||||
|
||||
Metadata createdMetadata = createdIdentifiableElement.getMetadata();
|
||||
Assert.assertTrue(createdMetadata!=null);
|
||||
Assert.assertTrue(createdMetadata != null);
|
||||
|
||||
String createdBy = createdMetadata.getCreatedBy();
|
||||
Assert.assertTrue(createdBy!=null);
|
||||
Assert.assertTrue(createdBy != null);
|
||||
|
||||
String lastUpdateBy = createdMetadata.getLastUpdateBy();
|
||||
Assert.assertTrue(lastUpdateBy!=null);
|
||||
Assert.assertTrue(lastUpdateBy != null);
|
||||
|
||||
Date creationTime = createdMetadata.getCreationTime();
|
||||
Assert.assertTrue(creationTime!=null);
|
||||
Assert.assertTrue(creationTime != null);
|
||||
|
||||
Date lastUpdateTime = createdMetadata.getLastUpdateTime();
|
||||
Assert.assertTrue(lastUpdateTime!=null);
|
||||
Assert.assertTrue(lastUpdateTime != null);
|
||||
Assert.assertTrue(lastUpdateTime.equals(creationTime) || lastUpdateTime.equals(lastUpdateTime));
|
||||
|
||||
Metadata metadata = identifiableElement.getMetadata();
|
||||
if(metadata!=null) {
|
||||
if (metadata != null) {
|
||||
|
||||
if(metadata.getCreatedBy()!=null) {
|
||||
Assert.assertTrue(createdBy.compareTo(metadata.getCreatedBy())==0);
|
||||
}else {
|
||||
Assert.assertTrue(createdBy.compareTo(MetadataUtility.getUser())==0);
|
||||
if (metadata.getCreatedBy() != null) {
|
||||
Assert.assertTrue(createdBy.compareTo(metadata.getCreatedBy()) == 0);
|
||||
} else {
|
||||
Assert.assertTrue(createdBy.compareTo(MetadataUtility.getUser()) == 0);
|
||||
}
|
||||
|
||||
if(metadata.getLastUpdateBy()!=null) {
|
||||
Assert.assertTrue(lastUpdateBy.compareTo(metadata.getLastUpdateBy())==0);
|
||||
}else {
|
||||
Assert.assertTrue(lastUpdateBy.compareTo(MetadataUtility.getUser())==0);
|
||||
if (metadata.getLastUpdateBy() != null) {
|
||||
Assert.assertTrue(lastUpdateBy.compareTo(metadata.getLastUpdateBy()) == 0);
|
||||
} else {
|
||||
Assert.assertTrue(lastUpdateBy.compareTo(MetadataUtility.getUser()) == 0);
|
||||
}
|
||||
|
||||
if(metadata.getLastUpdateTime()!=null) {
|
||||
Assert.assertTrue(lastUpdateTime.after(metadata.getLastUpdateTime()) || lastUpdateTime.compareTo(metadata.getLastUpdateTime())==0);
|
||||
if (metadata.getLastUpdateTime() != null) {
|
||||
Assert.assertTrue(lastUpdateTime.after(metadata.getLastUpdateTime())
|
||||
|| lastUpdateTime.compareTo(metadata.getLastUpdateTime()) == 0);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static void checkPropagationConstraint(PropagationConstraint propagationConstraint, PropagationConstraint gotPropagationConstraint) {
|
||||
Assert.assertTrue(propagationConstraint.getAddConstraint()==gotPropagationConstraint.getAddConstraint());
|
||||
Assert.assertTrue(propagationConstraint.getRemoveConstraint()==gotPropagationConstraint.getRemoveConstraint());
|
||||
public static void checkPropagationConstraint(PropagationConstraint propagationConstraint,
|
||||
PropagationConstraint gotPropagationConstraint) {
|
||||
Assert.assertTrue(propagationConstraint.getAddConstraint() == gotPropagationConstraint.getAddConstraint());
|
||||
Assert.assertTrue(
|
||||
propagationConstraint.getRemoveConstraint() == gotPropagationConstraint.getRemoveConstraint());
|
||||
}
|
||||
|
||||
public static void checkConsistOf(ConsistsOf<? extends Resource, ? extends Facet> consistsOf, ConsistsOf<? extends Resource, ? extends Facet> gotConsistsOf) {
|
||||
public static void checkConsistOf(ConsistsOf<? extends Resource, ? extends Facet> consistsOf,
|
||||
ConsistsOf<? extends Resource, ? extends Facet> gotConsistsOf) {
|
||||
checkUUIDAndMetadata(consistsOf, gotConsistsOf);
|
||||
|
||||
if(consistsOf.getPropagationConstraint()==null) {
|
||||
if (consistsOf.getPropagationConstraint() == null) {
|
||||
PropagationConstraint propagationConstraint = gotConsistsOf.getPropagationConstraint();
|
||||
Assert.assertTrue(propagationConstraint.getAddConstraint()==AddConstraint.propagate);
|
||||
Assert.assertTrue(propagationConstraint.getRemoveConstraint()==RemoveConstraint.cascade);
|
||||
Assert.assertTrue(propagationConstraint.getAddConstraint() == AddConstraint.propagate);
|
||||
Assert.assertTrue(propagationConstraint.getRemoveConstraint() == RemoveConstraint.cascade);
|
||||
checkPropagationConstraint(propagationConstraint, gotConsistsOf.getPropagationConstraint());
|
||||
}else {
|
||||
} else {
|
||||
checkPropagationConstraint(consistsOf.getPropagationConstraint(), gotConsistsOf.getPropagationConstraint());
|
||||
}
|
||||
|
||||
|
@ -293,8 +306,8 @@ public class ERManagementTest extends ContextTest {
|
|||
additionalProperties.remove(ModelElement.SUPERTYPES_PROPERTY);
|
||||
Map<String, Object> gotAdditionalProperties = new HashMap<>(gotConsistsOf.getAdditionalProperties());
|
||||
gotAdditionalProperties.remove(ModelElement.SUPERTYPES_PROPERTY);
|
||||
Assert.assertTrue(additionalProperties.size()==gotAdditionalProperties.size());
|
||||
for(String key : additionalProperties.keySet()) {
|
||||
Assert.assertTrue(additionalProperties.size() == gotAdditionalProperties.size());
|
||||
for (String key : additionalProperties.keySet()) {
|
||||
Assert.assertTrue(gotAdditionalProperties.containsKey(key));
|
||||
Object additionalProperty = additionalProperties.get(key);
|
||||
Object gotAdditionalProperty = gotAdditionalProperties.get(key);
|
||||
|
@ -307,9 +320,9 @@ public class ERManagementTest extends ContextTest {
|
|||
checkUUIDAndMetadata(facet, gotFacet);
|
||||
Class<? extends Facet> clz = facet.getClass();
|
||||
Class<? extends Facet> gotClz = gotFacet.getClass();
|
||||
Assert.assertTrue(clz==gotClz);
|
||||
Assert.assertTrue(clz == gotClz);
|
||||
|
||||
if(clz == SoftwareFacet.class) {
|
||||
if (clz == SoftwareFacet.class) {
|
||||
checkSoftwareFacetAssertion((SoftwareFacet) facet, (SoftwareFacet) gotFacet);
|
||||
}
|
||||
}
|
||||
|
@ -322,9 +335,10 @@ public class ERManagementTest extends ContextTest {
|
|||
List<ConsistsOf<? extends Resource, ? extends Facet>> gotResourceConsistsOf = gotResource.getConsistsOf();
|
||||
Assert.assertTrue(resourceConsistsOf.size() == gotResourceConsistsOf.size());
|
||||
|
||||
for(ConsistsOf<? extends Resource, ? extends Facet> consistsOf : resourceConsistsOf) {
|
||||
for (ConsistsOf<? extends Resource, ? extends Facet> consistsOf : resourceConsistsOf) {
|
||||
@SuppressWarnings("unchecked")
|
||||
ConsistsOf<? extends Resource, ? extends Facet> gotConsistsOf = (ConsistsOf<? extends Resource, ? extends Facet>) gotResource.getConsistsOf(consistsOf.getClass(), consistsOf.getTarget().getClass()).get(0);
|
||||
ConsistsOf<? extends Resource, ? extends Facet> gotConsistsOf = (ConsistsOf<? extends Resource, ? extends Facet>) gotResource
|
||||
.getConsistsOf(consistsOf.getClass(), consistsOf.getTarget().getClass()).get(0);
|
||||
checkConsistOf(consistsOf, gotConsistsOf);
|
||||
|
||||
Facet facet = consistsOf.getTarget();
|
||||
|
@ -334,7 +348,6 @@ public class ERManagementTest extends ContextTest {
|
|||
|
||||
}
|
||||
|
||||
|
||||
protected static <R extends Resource> void checkResourceRootMetaOnly(R resource, R gotResource) throws Exception {
|
||||
Assert.assertTrue(resource.getClass() == gotResource.getClass());
|
||||
checkUUIDAndMetadata(resource, gotResource);
|
||||
|
@ -343,14 +356,13 @@ public class ERManagementTest extends ContextTest {
|
|||
List<ConsistsOf<? extends Resource, ? extends Facet>> gotResourceConsistsOf = gotResource.getConsistsOf();
|
||||
Assert.assertTrue(resourceConsistsOf.size() == gotResourceConsistsOf.size());
|
||||
|
||||
for(ConsistsOf<? extends Resource, ? extends Facet> gotConsistsOf : gotResourceConsistsOf) {
|
||||
for (ConsistsOf<? extends Resource, ? extends Facet> gotConsistsOf : gotResourceConsistsOf) {
|
||||
Assert.assertNull(gotConsistsOf.getMetadata());
|
||||
Assert.assertNull(gotConsistsOf.getTarget().getMetadata());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
protected static <R extends Resource> void checkResourceNoMeta(R resource, R gotResource) throws Exception {
|
||||
Assert.assertTrue(resource.getClass() == gotResource.getClass());
|
||||
Assert.assertNull(gotResource.getMetadata());
|
||||
|
@ -359,7 +371,7 @@ public class ERManagementTest extends ContextTest {
|
|||
List<ConsistsOf<? extends Resource, ? extends Facet>> gotResourceConsistsOf = gotResource.getConsistsOf();
|
||||
Assert.assertTrue(resourceConsistsOf.size() == gotResourceConsistsOf.size());
|
||||
|
||||
for(ConsistsOf<? extends Resource, ? extends Facet> gotConsistsOf : gotResourceConsistsOf) {
|
||||
for (ConsistsOf<? extends Resource, ? extends Facet> gotConsistsOf : gotResourceConsistsOf) {
|
||||
Assert.assertNull(gotConsistsOf.getMetadata());
|
||||
Assert.assertNull(gotConsistsOf.getTarget().getMetadata());
|
||||
}
|
||||
|
@ -370,24 +382,23 @@ public class ERManagementTest extends ContextTest {
|
|||
ResourceManagement resourceManagement = new ResourceManagement();
|
||||
resourceManagement.setElementType(r.getTypeName());
|
||||
resourceManagement.setJson(ElementMapper.marshal(r));
|
||||
if(r.getID()!=null) {
|
||||
if (r.getID() != null) {
|
||||
resourceManagement.setUUID(r.getID());
|
||||
}
|
||||
return resourceManagement;
|
||||
}
|
||||
|
||||
public <R extends Resource> IsRelatedToManagement getIsRelatedToManagement(IsRelatedTo<? extends Resource, ? extends Resource> isRelatedTo) throws Exception {
|
||||
public <R extends Resource> IsRelatedToManagement getIsRelatedToManagement(
|
||||
IsRelatedTo<? extends Resource, ? extends Resource> isRelatedTo) throws Exception {
|
||||
IsRelatedToManagement isRelatedToManagement = new IsRelatedToManagement();
|
||||
isRelatedToManagement.setElementType(isRelatedTo.getTypeName());
|
||||
isRelatedToManagement.setJson(ElementMapper.marshal(isRelatedTo));
|
||||
if(isRelatedTo.getID()!=null) {
|
||||
if (isRelatedTo.getID() != null) {
|
||||
isRelatedToManagement.setUUID(isRelatedTo.getID());
|
||||
}
|
||||
return isRelatedToManagement;
|
||||
}
|
||||
|
||||
|
||||
|
||||
public <R extends Resource> R createResource(R r) throws Exception {
|
||||
ResourceManagement resourceManagement = getResourceManagement(r);
|
||||
String json = resourceManagement.create();
|
||||
|
@ -417,9 +428,10 @@ public class ERManagementTest extends ContextTest {
|
|||
return createHostingNode(eService, RemoveConstraint.cascade, DeleteConstraint.cascade);
|
||||
}
|
||||
|
||||
public HostingNode createHostingNode(EService eService, RemoveConstraint removeConstraint, DeleteConstraint deleteConstraint) throws Exception {
|
||||
public HostingNode createHostingNode(EService eService, RemoveConstraint removeConstraint,
|
||||
DeleteConstraint deleteConstraint) throws Exception {
|
||||
HostingNode hostingNode = ERManagementTest.instantiateValidHostingNode();
|
||||
if(eService!=null) {
|
||||
if (eService != null) {
|
||||
PropagationConstraint propagationConstraint = new PropagationConstraintImpl();
|
||||
propagationConstraint.setRemoveConstraint(removeConstraint);
|
||||
propagationConstraint.setDeleteConstraint(deleteConstraint);
|
||||
|
@ -448,19 +460,18 @@ public class ERManagementTest extends ContextTest {
|
|||
}
|
||||
|
||||
public <R extends Resource> void deleteResource(R r) throws Exception {
|
||||
if(r!=null) {
|
||||
if (r != null) {
|
||||
ResourceManagement resourceManagement = getResourceManagement(r);
|
||||
resourceManagement.delete();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testCreateEService() throws Exception {
|
||||
EService eService = null;
|
||||
try {
|
||||
eService = createEService();
|
||||
}finally {
|
||||
} finally {
|
||||
deleteResource(eService);
|
||||
}
|
||||
|
||||
|
@ -487,13 +498,12 @@ public class ERManagementTest extends ContextTest {
|
|||
readEService = (EService) readResource(eService.getID());
|
||||
checkResourceNoMeta(eService, readEService);
|
||||
|
||||
}finally {
|
||||
} finally {
|
||||
deleteResource(eService);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
protected Resource readResource(UUID uuid) throws Exception {
|
||||
ResourceManagement resourceManagement = new ResourceManagement();
|
||||
resourceManagement.setUUID(uuid);
|
||||
|
@ -503,28 +513,24 @@ public class ERManagementTest extends ContextTest {
|
|||
}
|
||||
|
||||
/*
|
||||
@Test
|
||||
public void testReadResource() throws Exception {
|
||||
readResource(UUID.fromString("26da57ee-33bd-4c4b-8aef-9206b61c329e"));
|
||||
}
|
||||
*/
|
||||
* @Test public void testReadResource() throws Exception {
|
||||
* readResource(UUID.fromString("26da57ee-33bd-4c4b-8aef-9206b61c329e")); }
|
||||
*/
|
||||
|
||||
/*
|
||||
@Test
|
||||
public void testDeleteResource() throws Exception {
|
||||
ResourceManagement resourceManagement = new ResourceManagement();
|
||||
resourceManagement.setUUID(UUID.fromString("64635295-7ced-4931-a55f-40fc8199b280"));
|
||||
boolean deleted = resourceManagement.delete();
|
||||
Assert.assertTrue(deleted);
|
||||
}
|
||||
*/
|
||||
* @Test public void testDeleteResource() throws Exception { ResourceManagement
|
||||
* resourceManagement = new ResourceManagement();
|
||||
* resourceManagement.setUUID(UUID.fromString(
|
||||
* "64635295-7ced-4931-a55f-40fc8199b280")); boolean deleted =
|
||||
* resourceManagement.delete(); Assert.assertTrue(deleted); }
|
||||
*/
|
||||
|
||||
@Test
|
||||
public void testCreateHostingNode() throws Exception {
|
||||
HostingNode hostingNode = null;
|
||||
try {
|
||||
hostingNode = createHostingNode();
|
||||
}finally {
|
||||
} finally {
|
||||
deleteResource(hostingNode);
|
||||
}
|
||||
|
||||
|
@ -554,7 +560,8 @@ public class ERManagementTest extends ContextTest {
|
|||
String createdConsistsOfString = consistsOfManagement.create();
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
ConsistsOf<EService, CPUFacet> createdConsistsOf = ElementMapper.unmarshal(ConsistsOf.class, createdConsistsOfString);
|
||||
ConsistsOf<EService, CPUFacet> createdConsistsOf = ElementMapper.unmarshal(ConsistsOf.class,
|
||||
createdConsistsOfString);
|
||||
|
||||
CPUFacet createdCpuFacet = createdConsistsOf.getTarget();
|
||||
|
||||
|
@ -671,15 +678,15 @@ public class ERManagementTest extends ContextTest {
|
|||
Assert.assertTrue(encValue.compareTo(encryptedValue) == 0);
|
||||
String decryptedValue = StringEncrypter.getEncrypter().decrypt(encValue);
|
||||
Assert.assertTrue(decryptedValue.compareTo(plainValue) == 0);
|
||||
Assert.assertTrue(((String) apf.getAdditionalProperty(additionalPropertyKey)).compareTo(additionlaPropertyValue) == 0);
|
||||
Assert.assertTrue(
|
||||
((String) apf.getAdditionalProperty(additionalPropertyKey)).compareTo(additionlaPropertyValue) == 0);
|
||||
|
||||
deleteResource(createdConfiguration);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testUpdateFacetValue() throws Exception {
|
||||
EService eService =null;
|
||||
EService eService = null;
|
||||
try {
|
||||
eService = createEService();
|
||||
|
||||
|
@ -697,7 +704,7 @@ public class ERManagementTest extends ContextTest {
|
|||
Assert.assertTrue(softwareFacet.getVersion().compareTo(newVersion) == 0);
|
||||
} catch (Throwable e) {
|
||||
throw e;
|
||||
}finally {
|
||||
} finally {
|
||||
deleteResource(eService);
|
||||
}
|
||||
}
|
||||
|
@ -730,4 +737,125 @@ public class ERManagementTest extends ContextTest {
|
|||
logger.debug("{}", ret);
|
||||
}
|
||||
|
||||
protected List<ERElement> getPaginatedInstances(Class<? extends ERElement> clz, int offset, int limit) throws Exception {
|
||||
ServerRequestInfo requestInfo = RequestUtility.getRequestInfo().get();
|
||||
requestInfo.setOffset(offset);
|
||||
requestInfo.setLimit(limit);
|
||||
|
||||
ElementManagement<?,?> erManagement = ElementManagementUtility.getERManagement(TypeUtility.getTypeName(clz));
|
||||
String ret = erManagement.all(true);
|
||||
|
||||
List<ERElement> list = ElementMapper.unmarshalList(ERElement.class, ret);
|
||||
return list;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLimitOffset() throws Exception {
|
||||
Map<String, Resource> map = createHostingNodeAndEService();
|
||||
logger.debug("---------------------------------------------------------------------");
|
||||
try {
|
||||
for(int i=1; i<6; i++) {
|
||||
testLimitOffset(Resource.class, i);
|
||||
testLimitOffset(Facet.class, i);
|
||||
testLimitOffset(IsRelatedTo.class, i);
|
||||
testLimitOffset(ConsistsOf.class, i);
|
||||
}
|
||||
logger.debug("---------------------------------------------------------------------\n\n\n");
|
||||
} finally {
|
||||
deleteResource(map.get(HostingNode.NAME));
|
||||
}
|
||||
}
|
||||
|
||||
public void testLimitOffset(Class<? extends ERElement> clz, int limit) throws Exception {
|
||||
if(limit <1) {
|
||||
throw new Exception("This test has been designed for limit>0 to test pagination");
|
||||
}
|
||||
|
||||
String typeName = TypeUtility.getTypeName(clz);
|
||||
|
||||
logger.debug("Going to test pagination of {} using limit {}", typeName, limit);
|
||||
int offset = limit * 0;
|
||||
|
||||
List<ERElement> list = getPaginatedInstances(clz, offset, limit);
|
||||
if (list.size() == 0) {
|
||||
logger.debug("Found 0 instances");
|
||||
logger.debug("Successfully tested pagination of {} using limit {}\n\n", typeName, limit);
|
||||
return;
|
||||
}
|
||||
Assert.assertTrue(list.size() <= limit);
|
||||
|
||||
Set<UUID> uuids = new HashSet<>();
|
||||
for (ERElement erElement : list) {
|
||||
UUID uuid = erElement.getID();
|
||||
uuids.add(uuid);
|
||||
logger.debug("[offset={},limit={}] Found {} with UUID {}", offset, limit, erElement.getTypeName(), uuid);
|
||||
}
|
||||
|
||||
if (list.size() < limit) {
|
||||
logger.debug("Successfully tested pagination of {} using limit {}\n\n", typeName, limit);
|
||||
return;
|
||||
}
|
||||
|
||||
offset = limit * 1;
|
||||
list = getPaginatedInstances(clz, offset, limit);
|
||||
if (list.size() > 0) {
|
||||
Assert.assertTrue(list.size() <= limit);
|
||||
|
||||
for (ERElement erElement : list) {
|
||||
UUID uuid = erElement.getID();
|
||||
Assert.assertFalse(uuids.contains(uuid));
|
||||
uuids.add(uuid);
|
||||
logger.debug("[offset={},limit={}] Found {} with UUID {}", offset, limit, erElement.getTypeName(), uuid);
|
||||
}
|
||||
|
||||
if (list.size() < limit) {
|
||||
logger.debug("Successfully tested pagination of {} using limit {}\n\n", typeName, limit);
|
||||
return;
|
||||
}
|
||||
|
||||
offset = 0;
|
||||
int doubleLimit = limit * 2;
|
||||
list = getPaginatedInstances(clz, offset, doubleLimit);
|
||||
|
||||
Assert.assertTrue(list.size() <= doubleLimit);
|
||||
|
||||
for (ERElement erElement : list) {
|
||||
UUID uuid = erElement.getID();
|
||||
logger.debug("[offset={},limit={}] Found {} with UUID {}", offset, doubleLimit, erElement.getTypeName(), uuid);
|
||||
Assert.assertTrue(uuids.contains(uuid));
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("Going to check all {} instances", typeName);
|
||||
|
||||
List<ERElement> all = getPaginatedInstances(clz, 0, -1);
|
||||
|
||||
uuids = new HashSet<>();
|
||||
|
||||
int i = -1;
|
||||
|
||||
while(true) {
|
||||
offset = ++i * limit;
|
||||
list = getPaginatedInstances(clz, offset, limit);
|
||||
for(ERElement erElement : list) {
|
||||
UUID uuid = erElement.getID();
|
||||
logger.info("[offset={},limit={}] Found {} with UUID {}", offset, limit, erElement.getTypeName(), uuid);
|
||||
uuids.add(uuid);
|
||||
}
|
||||
if(list.size()<limit) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Assert.assertTrue(all.size()==uuids.size());
|
||||
|
||||
for(ERElement erElement : all) {
|
||||
UUID uuid = erElement.getID();
|
||||
Assert.assertTrue(uuids.contains(uuid));
|
||||
logger.info("[UNLIMITED] Found {} with UUID {}", erElement.getTypeName(), uuid);
|
||||
|
||||
}
|
||||
|
||||
logger.debug("Successfully tested pagination of {} using limit {}\n\n", TypeUtility.getTypeName(clz), limit);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -445,6 +445,37 @@ public class SmartgearResourcesTest extends ERManagementTest {
|
|||
public static final String CPU_MODEL_B = "model\b";
|
||||
public static final String CPU_MODEL_NUMBER = "modelNumber";
|
||||
|
||||
|
||||
public static List<CPUFacet> getFakeCPUFacets() {
|
||||
List<CPUFacet> cpuFacets = new ArrayList<>();
|
||||
|
||||
if(cpuFacets.size()==0) {
|
||||
for(int i=0; i<8; i++) {
|
||||
CPUFacet cpuFacet = new CPUFacetImpl();
|
||||
cpuFacet.setVendor("Apple");
|
||||
cpuFacet.setModel("Apple M2 Pro");
|
||||
cpuFacet.setClockSpeed("3,504 GHz");
|
||||
cpuFacet.setAdditionalProperty("coreType", "Performance Cores (Avalanche)");
|
||||
cpuFacets.add(cpuFacet);
|
||||
}
|
||||
for(int i=0; i<4; i++) {
|
||||
CPUFacet cpuFacet = new CPUFacetImpl();
|
||||
cpuFacet.setVendor("Apple");
|
||||
cpuFacet.setModel("Apple M2 Pro");
|
||||
cpuFacet.setClockSpeed("2.424 GHz");
|
||||
cpuFacet.setAdditionalProperty("coreType", "Efficiency Cores (Blizzard)");
|
||||
cpuFacets.add(cpuFacet);
|
||||
|
||||
}
|
||||
}
|
||||
return cpuFacets;
|
||||
}
|
||||
|
||||
/**
|
||||
* This function properly works on Linux but fails on
|
||||
* other system such as Mac OS. In such case, the function
|
||||
* getFakeCPUFacet() is used instead;
|
||||
*/
|
||||
public static List<CPUFacet> getCPUFacets() {
|
||||
|
||||
List<CPUFacet> cpuFacets = new ArrayList<>();
|
||||
|
@ -453,7 +484,7 @@ public class SmartgearResourcesTest extends ERManagementTest {
|
|||
|
||||
if (!file.exists()) {
|
||||
logger.warn("cannot acquire CPU info (no /proc/cpuinfo)");
|
||||
return cpuFacets;
|
||||
return getFakeCPUFacets();
|
||||
}
|
||||
|
||||
BufferedReader input = null;
|
||||
|
@ -531,6 +562,11 @@ public class SmartgearResourcesTest extends ERManagementTest {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(cpuFacets.size()==0) {
|
||||
return getFakeCPUFacets();
|
||||
}
|
||||
|
||||
return cpuFacets;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,11 +5,17 @@ import java.io.File;
|
|||
import java.io.FileReader;
|
||||
import java.io.FilenameFilter;
|
||||
import java.net.URL;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.gcube.informationsystem.model.reference.entities.Entity;
|
||||
import org.gcube.informationsystem.resourceregistry.ContextTest;
|
||||
import org.gcube.informationsystem.resourceregistry.queries.json.JsonQuery;
|
||||
import org.gcube.informationsystem.serialization.ElementMapper;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -29,8 +35,45 @@ public class JsonQueryTest extends ContextTest {
|
|||
return new File(resourcesDirectory, "queries");
|
||||
}
|
||||
|
||||
public File getProjectionQueriesDirectory() throws Exception {
|
||||
URL logbackFileURL = JsonQueryTest.class.getClassLoader().getResource("logback-test.xml");
|
||||
File logbackFile = new File(logbackFileURL.toURI());
|
||||
File resourcesDirectory = logbackFile.getParentFile();
|
||||
return new File(resourcesDirectory, "projection-queries");
|
||||
}
|
||||
|
||||
protected boolean compareQueries(StringBuffer createdSb, StringBuffer expectedSb) {
|
||||
return compareQueries(createdSb.toString(), expectedSb.toString());
|
||||
}
|
||||
|
||||
protected String normalizeString(String s) {
|
||||
return s.replaceAll("\n{1,}", "")
|
||||
.replaceAll("\r{1,}", "")
|
||||
.replaceAll("\t{1,}", "")
|
||||
.replaceAll("\\s{2,}", " ")
|
||||
.replaceAll("\\(\\s{1,}", "(")
|
||||
.replaceAll("\\s{1,}\\(", "(")
|
||||
.replaceAll("\\)\\s{1,}", ")")
|
||||
.replaceAll("\\s{1,}\\)", ")");
|
||||
}
|
||||
|
||||
protected boolean compareQueries(String createdString, String expectedString) {
|
||||
String created = normalizeString(createdString);
|
||||
String expected = normalizeString(expectedString);
|
||||
logger.debug(created);
|
||||
logger.debug(expected);
|
||||
return created.compareTo(expected)==0 ? true : false;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testJsonQueries() throws Exception {
|
||||
public void testCompares() throws Exception {
|
||||
String a = "))\n\t\r ) ) ) )";
|
||||
String b = "))))))";
|
||||
Assert.assertTrue(compareQueries(a, b));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueries() throws Exception {
|
||||
ContextTest.setContextByName(DEVVRE);
|
||||
|
||||
File queriesDirectory = getQueriesDirectory();
|
||||
|
@ -43,7 +86,7 @@ public class JsonQueryTest extends ContextTest {
|
|||
};
|
||||
|
||||
for(File jsonQueryFile : queriesDirectory.listFiles(filenameFilter)) {
|
||||
logger.info("Going to read JSON query frtm file {}", jsonQueryFile.getAbsolutePath());
|
||||
logger.info("Going to read JSON query from file {}", jsonQueryFile.getAbsolutePath());
|
||||
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
JsonNode jsonNode = objectMapper.readTree(jsonQueryFile);
|
||||
|
@ -56,7 +99,7 @@ public class JsonQueryTest extends ContextTest {
|
|||
logger.info("Created Query from JSON: {}", createdStringBuffer.toString());
|
||||
|
||||
StringBuffer expectedStringBuffer = new StringBuffer();
|
||||
File expectedQueryFile = new File(queriesDirectory, jsonQueryFile.getName().replace("json", "query"));
|
||||
File expectedQueryFile = new File(queriesDirectory, jsonQueryFile.getName().replace("json", "match.oquery"));
|
||||
try(BufferedReader br = new BufferedReader(new FileReader(expectedQueryFile))) {
|
||||
for(String line; (line = br.readLine()) != null; ) {
|
||||
expectedStringBuffer.append(line);
|
||||
|
@ -64,8 +107,7 @@ public class JsonQueryTest extends ContextTest {
|
|||
}
|
||||
|
||||
logger.info("Expected Query from JSON: {}", expectedStringBuffer.toString());
|
||||
|
||||
Assert.assertTrue(createdStringBuffer.toString().compareTo(expectedStringBuffer.toString())==0);
|
||||
Assert.assertTrue(compareQueries(createdStringBuffer, expectedStringBuffer));
|
||||
|
||||
String result = jsonQuery.query();
|
||||
logger.info("Result : {}", result);
|
||||
|
@ -73,9 +115,9 @@ public class JsonQueryTest extends ContextTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testSingleCreateQuery() throws Exception {
|
||||
public void testSingleQuery() throws Exception {
|
||||
File queriesDirectory = getQueriesDirectory();
|
||||
File jsonQueryFile = new File(queriesDirectory, "query6.json");
|
||||
File jsonQueryFile = new File(queriesDirectory, "query10.json");
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
JsonNode jsonNode = objectMapper.readTree(jsonQueryFile);
|
||||
logger.info("Going to test the following JSON query {}", jsonNode.toString());
|
||||
|
@ -87,7 +129,7 @@ public class JsonQueryTest extends ContextTest {
|
|||
logger.info("Created Query from JSON: {}", createdStringBuffer.toString());
|
||||
|
||||
StringBuffer expectedStringBuffer = new StringBuffer();
|
||||
File expectedQueryFile = new File(queriesDirectory, jsonQueryFile.getName().replace("json", "query"));
|
||||
File expectedQueryFile = new File(queriesDirectory, jsonQueryFile.getName().replace("json", "match.oquery"));
|
||||
try(BufferedReader br = new BufferedReader(new FileReader(expectedQueryFile))) {
|
||||
for(String line; (line = br.readLine()) != null; ) {
|
||||
expectedStringBuffer.append(line);
|
||||
|
@ -95,18 +137,16 @@ public class JsonQueryTest extends ContextTest {
|
|||
}
|
||||
|
||||
logger.info("Expected Query from JSON: {}", expectedStringBuffer.toString());
|
||||
|
||||
Assert.assertTrue(createdStringBuffer.toString().compareTo(expectedStringBuffer.toString())==0);
|
||||
Assert.assertTrue(compareQueries(createdStringBuffer, expectedStringBuffer));
|
||||
|
||||
String result = jsonQuery.query();
|
||||
logger.info("Result : {}", result);
|
||||
}
|
||||
|
||||
// @Test
|
||||
public void testSingleQuery() throws Exception {
|
||||
protected List<Entity> testSingleQuery(int offset, int limit) throws Exception {
|
||||
ContextTest.setContextByName(DEVVRE);
|
||||
File queriesDirectory = getQueriesDirectory();
|
||||
File jsonQueryFile = new File(queriesDirectory, "query3.json");
|
||||
File jsonQueryFile = new File(queriesDirectory, "query1.json");
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
JsonNode jsonNode = objectMapper.readTree(jsonQueryFile);
|
||||
logger.info("Going to test the following JSON query {}", jsonNode.toString());
|
||||
|
@ -115,6 +155,140 @@ public class JsonQueryTest extends ContextTest {
|
|||
jsonQuery.setJsonQuery(jsonNode);
|
||||
String res = jsonQuery.query();
|
||||
logger.info(res);
|
||||
|
||||
List<Entity> ret = ElementMapper.unmarshalList(Entity.class, res);
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void testLimitOffset() throws Exception {
|
||||
int limit = 2;
|
||||
|
||||
List<Entity> entities = testSingleQuery(0, limit);
|
||||
if(entities.size()==0) {
|
||||
return;
|
||||
}
|
||||
Assert.assertTrue(entities.size() <= limit);
|
||||
|
||||
if(entities.size()< limit) {
|
||||
return;
|
||||
}
|
||||
|
||||
Set<UUID> uuids = new HashSet<>();
|
||||
for(Entity entity : entities) {
|
||||
UUID uuid = entity.getID();
|
||||
uuids.add(uuid);
|
||||
logger.info("Found {} with UUID {}", Entity.NAME, uuid);
|
||||
}
|
||||
|
||||
entities = testSingleQuery(limit, limit);
|
||||
|
||||
if(entities.size()>0) {
|
||||
Assert.assertTrue(entities.size() <= limit);
|
||||
|
||||
for(Entity entity : entities) {
|
||||
UUID uuid = entity.getID();
|
||||
Assert.assertFalse(uuids.contains(uuid));
|
||||
uuids.add(uuid);
|
||||
logger.info("Found {} with UUID {}", Entity.NAME, uuid);
|
||||
}
|
||||
|
||||
if(entities.size()<limit) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
int doubleLimit = limit*2;
|
||||
|
||||
entities = testSingleQuery(0, doubleLimit);
|
||||
|
||||
Assert.assertTrue(entities.size() <= doubleLimit);
|
||||
|
||||
for(Entity entity : entities) {
|
||||
UUID uuid = entity.getID();
|
||||
logger.info("Checking if {} with UUID {} was contained in the previous queries", Entity.NAME, uuid);
|
||||
Assert.assertTrue(uuids.contains(uuid));
|
||||
logger.info("As expected got {} with UUID {} and name {}", Entity.NAME, uuid);
|
||||
}
|
||||
}
|
||||
|
||||
entities = testSingleQuery(0, -1);
|
||||
|
||||
Assert.assertTrue(entities.size()>=uuids.size());
|
||||
|
||||
for(Entity entity : entities) {
|
||||
UUID uuid = entity.getID();
|
||||
logger.info("No limit listing: Got {} with UUID {}", Entity.NAME, uuid);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingleProjectionQuery() throws Exception {
|
||||
File queriesDirectory = getProjectionQueriesDirectory();
|
||||
File jsonQueryFile = new File(queriesDirectory, "HostingNode-query.json");
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
JsonNode jsonNode = objectMapper.readTree(jsonQueryFile);
|
||||
logger.info("Going to test the following JSON query {}", jsonNode.toString());
|
||||
|
||||
JsonQuery jsonQuery = new JsonQuery();
|
||||
jsonQuery.setJsonQuery(jsonNode);
|
||||
StringBuffer createdStringBuffer = jsonQuery.createMatchQuery();
|
||||
|
||||
logger.info("Created Query from JSON:\n{}", createdStringBuffer.toString());
|
||||
|
||||
StringBuffer expectedStringBuffer = new StringBuffer();
|
||||
File expectedQueryFile = new File(queriesDirectory, jsonQueryFile.getName().replace("-query.json", ".match.oquery"));
|
||||
try(BufferedReader br = new BufferedReader(new FileReader(expectedQueryFile))) {
|
||||
for(String line; (line = br.readLine()) != null; ) {
|
||||
expectedStringBuffer.append(line);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("Expected Query from JSON: {}", expectedStringBuffer.toString());
|
||||
|
||||
Assert.assertTrue(compareQueries(createdStringBuffer, expectedStringBuffer));
|
||||
|
||||
String result = jsonQuery.query();
|
||||
logger.info("Result : {}", result);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testProjectionQueries() throws Exception {
|
||||
File queriesDirectory = getProjectionQueriesDirectory();
|
||||
|
||||
FilenameFilter filenameFilter = new FilenameFilter() {
|
||||
@Override
|
||||
public boolean accept(File dir, String name) {
|
||||
return name.endsWith("-query.json");
|
||||
}
|
||||
};
|
||||
|
||||
for(File jsonQueryFile : queriesDirectory.listFiles(filenameFilter)) {
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
JsonNode jsonNode = objectMapper.readTree(jsonQueryFile);
|
||||
logger.info("Going to test the following JSON query {}", jsonNode.toString());
|
||||
|
||||
JsonQuery jsonQuery = new JsonQuery();
|
||||
jsonQuery.setJsonQuery(jsonNode);
|
||||
StringBuffer createdStringBuffer = jsonQuery.createMatchQuery();
|
||||
|
||||
logger.info("Created Query from JSON:\n{}", createdStringBuffer.toString());
|
||||
|
||||
StringBuffer expectedStringBuffer = new StringBuffer();
|
||||
File expectedQueryFile = new File(queriesDirectory, jsonQueryFile.getName().replace("-query.json", ".match.oquery"));
|
||||
try(BufferedReader br = new BufferedReader(new FileReader(expectedQueryFile))) {
|
||||
for(String line; (line = br.readLine()) != null; ) {
|
||||
expectedStringBuffer.append(line);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("Expected Query from JSON: {}", expectedStringBuffer.toString());
|
||||
Assert.assertTrue(compareQueries(createdStringBuffer, expectedStringBuffer));
|
||||
|
||||
String result = jsonQuery.query();
|
||||
logger.info("Result : {}", result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -57,7 +57,7 @@ public class QueryTest extends ERManagementTest {
|
|||
public void testRawQuery() throws InvalidQueryException {
|
||||
QueryImpl queryImpl = new QueryImpl();
|
||||
|
||||
String query = "select from SoftwareFacet";
|
||||
String query = "SELECT FROM SoftwareFacet";
|
||||
String ret = queryImpl.query(query, false);
|
||||
|
||||
logger.debug(ret);
|
||||
|
@ -337,7 +337,7 @@ public class QueryTest extends ERManagementTest {
|
|||
@Test
|
||||
public void getAllResourcesHavingFacet() throws ObjectNotFound, Exception {
|
||||
String[] names = new String[]{"MyTest", "MyTest", "Aux", "MyID", "MyID"};
|
||||
Object[] additionalProperties = new Object[] {5, 5.0, false, "test", null};
|
||||
Object[] additionalProperties = new Object[] {5, 6.0, false, "test", null};
|
||||
|
||||
List<Configuration> createdConfigurations = new ArrayList<>();
|
||||
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
package org.gcube.informationsystem.resourceregistry.queries.operators;
|
||||
|
||||
import org.gcube.com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.gcube.com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class MathematicsOperatorTest {
|
||||
|
||||
protected static Logger logger = LoggerFactory.getLogger(MathematicsOperatorTest.class);
|
||||
|
||||
public static final String JSON = "{ \n"
|
||||
+ " \"values\" : [\n"
|
||||
+ " {\n"
|
||||
+ " \"_minus\" : [\n"
|
||||
+ " \"size\", \n"
|
||||
+ " \"used\" \n"
|
||||
+ " ]\n"
|
||||
+ " },\n"
|
||||
+ " \"unit\" \n"
|
||||
+ " ],\n"
|
||||
+ " \"separator\" : \" \",\n"
|
||||
+ " \"as\" : \"HD Space Left\"\n"
|
||||
+ "}";
|
||||
|
||||
@Test
|
||||
public void testGenerateFieldToEmit() throws Exception {
|
||||
ObjectMapper om = new ObjectMapper();
|
||||
JsonNode jn = om.readTree(JSON);
|
||||
String s = MatemathicsOperator.SUM.generateFieldToEmit(jn, "haspersistentmemory20");
|
||||
logger.debug(s);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -14,22 +14,26 @@ import org.gcube.informationsystem.model.reference.entities.Entity;
|
|||
import org.gcube.informationsystem.model.reference.entities.Facet;
|
||||
import org.gcube.informationsystem.model.reference.entities.Resource;
|
||||
import org.gcube.informationsystem.model.reference.properties.Encrypted;
|
||||
import org.gcube.informationsystem.model.reference.properties.Event;
|
||||
import org.gcube.informationsystem.model.reference.properties.Metadata;
|
||||
import org.gcube.informationsystem.model.reference.properties.PropagationConstraint;
|
||||
import org.gcube.informationsystem.model.reference.properties.Property;
|
||||
import org.gcube.informationsystem.model.reference.relations.ConsistsOf;
|
||||
import org.gcube.informationsystem.model.reference.relations.IsRelatedTo;
|
||||
import org.gcube.informationsystem.model.reference.relations.Relation;
|
||||
import org.gcube.informationsystem.queries.templates.reference.properties.TemplateVariable;
|
||||
import org.gcube.informationsystem.resourceregistry.ContextTest;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaAlreadyPresentException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaException;
|
||||
import org.gcube.informationsystem.resourceregistry.api.exceptions.types.SchemaNotFoundException;
|
||||
import org.gcube.informationsystem.types.TypeMapper;
|
||||
import org.gcube.informationsystem.types.reference.Type;
|
||||
import org.gcube.informationsystem.types.reference.properties.PropertyDefinition;
|
||||
import org.gcube.informationsystem.types.reference.relations.RelationType;
|
||||
import org.gcube.informationsystem.utils.Version;
|
||||
import org.gcube.resourcemanagement.model.reference.entities.facets.AccessPointFacet;
|
||||
import org.gcube.resourcemanagement.model.reference.entities.facets.ContactFacet;
|
||||
import org.gcube.resourcemanagement.model.reference.entities.facets.EventFacet;
|
||||
import org.gcube.resourcemanagement.model.reference.entities.resources.Actor;
|
||||
import org.gcube.resourcemanagement.model.reference.entities.resources.EService;
|
||||
import org.gcube.resourcemanagement.model.reference.entities.resources.RunningPlugin;
|
||||
|
@ -400,4 +404,47 @@ public class SchemaManagementImplTest extends ContextTest {
|
|||
compareTypes(type, typeManaged);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void update_to_IS_Model_7_1_0_and_GCube_Model_5_1_0() throws Exception {
|
||||
// TypeManagement typeManagement = new TypeManagement();
|
||||
// String typeName = PropertyDefinition.NAME;
|
||||
// typeManagement.setTypeName(typeName);
|
||||
// logger.info("Going to delete {}", typeName);
|
||||
// typeManagement.delete(AccessType.PROPERTY_ELEMENT);
|
||||
//
|
||||
//
|
||||
// Type type = TypeMapper.createTypeDefinition(PropertyDefinition.class);
|
||||
// logger.info("Going to create {} : {}", type.getName(), TypeMapper.serializeTypeDefinition(type));
|
||||
// String ret = create(PropertyDefinition.class);
|
||||
// logger.info("Created {} : {}", type.getName(), ret);
|
||||
// Type typeManaged = TypeMapper.deserializeTypeDefinition(ret);
|
||||
// compareTypes(type, typeManaged);
|
||||
|
||||
|
||||
// TypeManagement typeManagement = new TypeManagement();
|
||||
// String typeName = TemplateVariable.NAME;
|
||||
// typeManagement.setTypeName(typeName);
|
||||
// logger.info("Going to delete {}", typeName);
|
||||
// typeManagement.delete(AccessType.PROPERTY_ELEMENT);
|
||||
|
||||
|
||||
// Type type = TypeMapper.createTypeDefinition(TemplateVariable.class);
|
||||
// logger.info("Going to create {} : {}", type.getName(), TypeMapper.serializeTypeDefinition(type));
|
||||
// String ret = create(TemplateVariable.class);
|
||||
// logger.info("Created {} : {}", type.getName(), ret);
|
||||
// Type typeManaged = TypeMapper.deserializeTypeDefinition(ret);
|
||||
// compareTypes(type, typeManaged);
|
||||
|
||||
// create(AttributeProperty.class);
|
||||
|
||||
// create(TemplateFacet.class);
|
||||
|
||||
// update(ConfigurationTemplate.class, true);
|
||||
|
||||
// create(Event.class);
|
||||
|
||||
// update(EventFacet.class, true);
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,7 +14,9 @@
|
|||
<logger name="org.gcube.informationsystem.types" level="INFO" />
|
||||
<logger name="org.gcube.informationsystem.resourceregistry.dbinitialization" level="INFO" />
|
||||
<logger name="org.gcube.informationsystem.utils.discovery" level="ERROR" />
|
||||
<logger name="org.gcube.informationsystem.resourceregistry.types" level="TRACE" />
|
||||
<logger name="org.gcube.informationsystem.resourceregistry" level="TRACE" />
|
||||
<logger name="org.gcube.informationsystem.resourceregistry.contexts" level="INFO" />
|
||||
<logger name="org.gcube.informationsystem.resourceregistry.types" level="INFO" />
|
||||
<logger name="org.gcube.informationsystem.resourceregistry.instances" level="TRACE" />
|
||||
|
||||
<root level="WARN">
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
[
|
||||
{
|
||||
"Id" : "",
|
||||
"Group": "",
|
||||
"Name" : "",
|
||||
"Version" : "",
|
||||
"Status" : "",
|
||||
"Host" ""
|
||||
}
|
||||
]
|
|
@ -0,0 +1,47 @@
|
|||
{
|
||||
"type": "EService",
|
||||
"_emit": {
|
||||
"id": "ID"
|
||||
},
|
||||
"consistsOf": [
|
||||
{
|
||||
"type": "IsIdentifiedBy",
|
||||
"target": {
|
||||
"type": "SoftwareFacet",
|
||||
"_emit": {
|
||||
"group": "Group",
|
||||
"name": "Name",
|
||||
"version": "Version"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "ConsistsOf",
|
||||
"target": {
|
||||
"type": "StateFacet",
|
||||
"_emit": {
|
||||
"value": "Status"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"isRelatedTo": [
|
||||
{
|
||||
"type": "Activates",
|
||||
"source": {
|
||||
"type": "HostingNode",
|
||||
"consistsOf": [
|
||||
{
|
||||
"type": "IsIdentifiedBy",
|
||||
"target": {
|
||||
"type": "NetworkingFacet",
|
||||
"_emit": {
|
||||
"hostName": "Host"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,29 @@
|
|||
MATCH
|
||||
{class: EService, as: eservice0, where: ($currentMatch['@class'] INSTANCEOF 'EService')}
|
||||
|
||||
.inE('Activates') { as: activates00, where: ($currentMatch['@class'] INSTANCEOF 'Activates')}
|
||||
.outV('HostingNode') { as: hostingnode000, where: ($currentMatch['@class'] INSTANCEOF 'HostingNode')}
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby0000, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('NetworkingFacet') { as: networkingfacet00000, where: ($currentMatch['@class'] INSTANCEOF 'NetworkingFacet')}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby0000 == $currentMatch)}
|
||||
.outV('HostingNode') { where: ($matched.hostingnode000 == $currentMatch)}
|
||||
.outE('Activates') { where: ($matched.activates00 == $currentMatch)}
|
||||
.inV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby01, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('SoftwareFacet') { as: softwarefacet010, where: ($currentMatch['@class'] INSTANCEOF 'SoftwareFacet')}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby01 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof02, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.inV('StateFacet') { as: statefacet020, where: ($currentMatch['@class'] INSTANCEOF 'StateFacet')}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof02 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
RETURN
|
||||
networkingfacet00000.hostName AS `Host`,
|
||||
softwarefacet010.group AS `Group`,
|
||||
softwarefacet010.name AS `Name`,
|
||||
softwarefacet010.version AS `Version`,
|
||||
statefacet020.value AS `Status`,
|
||||
eservice0.id AS `ID`
|
|
@ -0,0 +1,18 @@
|
|||
[
|
||||
{
|
||||
"id" : "",
|
||||
"Last Update Time": "",
|
||||
"HostName" : "",
|
||||
"Status" : "",
|
||||
"HD Space Left" : "34 Gb",
|
||||
"Mem. Available." : "1,23 Gb"
|
||||
},
|
||||
{
|
||||
"id" : "",
|
||||
"Last Update Time": "",
|
||||
"HostName" : "",
|
||||
"Status" : "",
|
||||
"HD Space Left" : "42 Gb",
|
||||
"Mem. Available." : "2,54 Gb"
|
||||
}
|
||||
]
|
|
@ -0,0 +1,65 @@
|
|||
{
|
||||
"type": "HostingNode",
|
||||
"_emit" : {
|
||||
"id" : "ID"
|
||||
},
|
||||
"metadata" : {
|
||||
"_emit" : {
|
||||
"lastUpdateTime" : "Last Update Time"
|
||||
}
|
||||
},
|
||||
"consistsOf": [
|
||||
{
|
||||
"type": "IsIdentifiedBy",
|
||||
"target": {
|
||||
"type": "NetworkingFacet",
|
||||
"_emit" : {
|
||||
"hostName" : "HostName"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "ConsistsOf",
|
||||
"target": {
|
||||
"type": "StateFacet",
|
||||
"_emit" : {
|
||||
"value" : "Status"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "HasPersistentMemory",
|
||||
"target": {
|
||||
"type": "MemoryFacet",
|
||||
"_sum" : {
|
||||
"values" : [
|
||||
{
|
||||
"_minus" : [
|
||||
"size",
|
||||
"used"
|
||||
]
|
||||
},
|
||||
"unit"
|
||||
],
|
||||
"separator" : " ",
|
||||
"as" : "HD Space Left"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "HasVolatileMemory",
|
||||
"target": {
|
||||
"type": "MemoryFacet",
|
||||
"jvmMaxMemory" : null,
|
||||
"_sum" : {
|
||||
"values" : [
|
||||
"size",
|
||||
"unit"
|
||||
],
|
||||
"separator" : " ",
|
||||
"as" : "Mem. Available."
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
MATCH
|
||||
{class: HostingNode, as: hostingnode0, where: ($currentMatch['@class'] INSTANCEOF 'HostingNode')}
|
||||
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby00, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('NetworkingFacet') { as: networkingfacet000, where: ($currentMatch['@class'] INSTANCEOF 'NetworkingFacet')}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby00 == $currentMatch)}
|
||||
.outV('HostingNode') { where: ($matched.hostingnode0 == $currentMatch)}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof01, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.inV('StateFacet') { as: statefacet010, where: ($currentMatch['@class'] INSTANCEOF 'StateFacet')}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof01 == $currentMatch)}
|
||||
.outV('HostingNode') { where: ($matched.hostingnode0 == $currentMatch)}
|
||||
|
||||
.outE('HasPersistentMemory') { as: haspersistentmemory02, where: ($currentMatch['@class'] INSTANCEOF 'HasPersistentMemory')}
|
||||
.inV('MemoryFacet') { as: memoryfacet020, where: ($currentMatch['@class'] INSTANCEOF 'MemoryFacet')}
|
||||
.inE('HasPersistentMemory') { where: ($matched.haspersistentmemory02 == $currentMatch)}
|
||||
.outV('HostingNode') { where: ($matched.hostingnode0 == $currentMatch)}
|
||||
|
||||
.outE('HasVolatileMemory') { as: hasvolatilememory03, where: ($currentMatch['@class'] INSTANCEOF 'HasVolatileMemory')}
|
||||
.inV('MemoryFacet') { as: memoryfacet030, where: (($currentMatch['@class'] INSTANCEOF 'MemoryFacet') AND (jvmMaxMemory IS null))}
|
||||
.inE('HasVolatileMemory') { where: ($matched.hasvolatilememory03 == $currentMatch)}
|
||||
.outV('HostingNode') { where: ($matched.hostingnode0 == $currentMatch)}
|
||||
|
||||
RETURN
|
||||
networkingfacet000.hostName AS `HostName`,
|
||||
statefacet010.value AS `Status`,
|
||||
((memoryfacet020.size - memoryfacet020.used) + ' ' + memoryfacet020.unit) AS `HD Space Left`,
|
||||
(memoryfacet030.size + ' ' + memoryfacet030.unit) AS `Mem. Available.`,
|
||||
hostingnode0.id AS `ID`,
|
||||
hostingnode0.metadata.lastUpdateTime AS `Last Update Time`
|
|
@ -0,0 +1,17 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: EService, as: eservice0, where: ($currentMatch['@class'] INSTANCEOF 'EService')}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof00, where: (($currentMatch['@class'] INSTANCEOF 'ConsistsOf') AND (propagationConstraint.add = "propagate"))}
|
||||
.inV('StateFacet') { as: statefacet000, where: (($currentMatch['@class'] INSTANCEOF 'StateFacet') AND (value = "down"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof00 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby01, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('SoftwareFacet') { as: softwarefacet010, where: (($currentMatch['@class'] INSTANCEOF 'SoftwareFacet') AND (name = "data-transfer-service" AND group = "DataTransfer"))}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby01 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
RETURN
|
||||
DISTINCT(eservice0) as ret
|
||||
)
|
|
@ -1 +0,0 @@
|
|||
TRAVERSE outV("EService") FROM ( TRAVERSE inE("IsIdentifiedBy") FROM ( SELECT FROM ( TRAVERSE inV("SoftwareFacet") FROM ( TRAVERSE outE("IsIdentifiedBy") FROM ( TRAVERSE outV("EService") FROM ( SELECT FROM ( TRAVERSE inE("ConsistsOf") FROM ( SELECT FROM StateFacet WHERE value = "down")) WHERE propagationConstraint.add = "propagate")))) WHERE name = "data-transfer-service" AND group = "DataTransfer"))
|
|
@ -0,0 +1,19 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
TRAVERSE inE("IsIdentifiedBy") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("SoftwareFacet") FROM (
|
||||
TRAVERSE outE("IsIdentifiedBy") FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM StateFacet WHERE value = "down"
|
||||
)
|
||||
) WHERE propagationConstraint.add = "propagate"
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE name = "data-transfer-service" AND group = "DataTransfer"
|
||||
)
|
||||
)
|
||||
) WHERE @class INSTANCEOF "EService"
|
|
@ -0,0 +1,12 @@
|
|||
{
|
||||
"type": "HostingNode",
|
||||
"consistsOf": [
|
||||
{
|
||||
"type": "ConsistsOf",
|
||||
"target": {
|
||||
"type": "StateFacet",
|
||||
"value": "certified"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: HostingNode, as: hostingnode0, where: ($currentMatch['@class'] INSTANCEOF 'HostingNode')}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof00, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.inV('StateFacet') { as: statefacet000, where: (($currentMatch['@class'] INSTANCEOF 'StateFacet') AND (value = "certified"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof00 == $currentMatch)}
|
||||
.outV('HostingNode') { where: ($matched.hostingnode0 == $currentMatch)}
|
||||
|
||||
RETURN
|
||||
DISTINCT(hostingnode0) as ret
|
||||
)
|
|
@ -0,0 +1,19 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
TRAVERSE inE("IsIdentifiedBy") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("SoftwareFacet") FROM (
|
||||
TRAVERSE outE("IsIdentifiedBy") FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM StateFacet WHERE value = "down"
|
||||
)
|
||||
) WHERE propagationConstraint.add = "propagate"
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE name = "data-transfer-service" AND group = "DataTransfer"
|
||||
)
|
||||
)
|
||||
) WHERE @class INSTANCEOF "EService"
|
|
@ -0,0 +1,10 @@
|
|||
{
|
||||
"type": "ConsistsOf",
|
||||
"source": {
|
||||
"type": "HostingNode",
|
||||
"id": "34498fb5-e184-473b-ad2f-08f7ab1afd35"
|
||||
},
|
||||
"target": {
|
||||
"type" : "CPUFacet"
|
||||
}
|
||||
}
|
|
@ -23,17 +23,17 @@
|
|||
"type": "ConsistsOf",
|
||||
"target": {
|
||||
"type": "AccessPointFacet",
|
||||
"endpoint": "http://pc-frosini.isti.cnr.it:8080/data-transfer-service/gcube/service"
|
||||
"endpoint": "http://smartexecutor1.dev.int.d4science.net:80/data-transfer-service/gcube/resource"
|
||||
}
|
||||
}
|
||||
],
|
||||
"isRelatedTo" : [
|
||||
{
|
||||
"type": "Activates",
|
||||
"id": "d3f58e52-5346-47bc-b736-9d77a0b554ce",
|
||||
"id": "bd89a311-780d-4efe-93e5-08281e53bce7",
|
||||
"source": {
|
||||
"type": "HostingNode",
|
||||
"id" : "5fbc1a56-d450-4f0f-85c1-9b1684581717"
|
||||
"id" : "44fac329-eed5-4f18-90ba-a54d5aad316e"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: EService, as: eservice0, where: ($currentMatch['@class'] INSTANCEOF 'EService')}
|
||||
|
||||
.inE('Activates') { as: activates00, where: (($currentMatch['@class'] INSTANCEOF 'Activates') AND (id = "bd89a311-780d-4efe-93e5-08281e53bce7"))}
|
||||
.outV('HostingNode') { as: hostingnode000, where: (($currentMatch['@class'] INSTANCEOF 'HostingNode') AND (id = "44fac329-eed5-4f18-90ba-a54d5aad316e"))}
|
||||
.outE('Activates') { where: ($matched.activates00 == $currentMatch)}
|
||||
.inV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof01, where: (($currentMatch['@class'] INSTANCEOF 'ConsistsOf') AND (propagationConstraint.add = "propagate"))}
|
||||
.inV('StateFacet') { as: statefacet010, where: (($currentMatch['@class'] INSTANCEOF 'StateFacet') AND (value = "down"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof01 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby02, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('SoftwareFacet') { as: softwarefacet020, where: (($currentMatch['@class'] INSTANCEOF 'SoftwareFacet') AND (name = "data-transfer-service" AND group = "DataTransfer"))}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby02 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof03, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.inV('AccessPointFacet') { as: accesspointfacet030, where: (($currentMatch['@class'] INSTANCEOF 'AccessPointFacet') AND (endpoint = "http://smartexecutor1.dev.int.d4science.net:80/data-transfer-service/gcube/resource"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof03 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
RETURN
|
||||
DISTINCT(eservice0) as ret
|
||||
)
|
|
@ -1 +0,0 @@
|
|||
TRAVERSE outV("EService") FROM ( TRAVERSE inE("ConsistsOf") FROM ( SELECT FROM ( TRAVERSE inV("AccessPointFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( TRAVERSE outV("EService") FROM ( TRAVERSE inE("IsIdentifiedBy") FROM ( SELECT FROM ( TRAVERSE inV("SoftwareFacet") FROM ( TRAVERSE outE("IsIdentifiedBy") FROM ( TRAVERSE outV("EService") FROM ( SELECT FROM ( TRAVERSE inE("ConsistsOf") FROM ( SELECT FROM ( TRAVERSE inV("StateFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( TRAVERSE inV("EService") FROM ( TRAVERSE outE("Activates") FROM ( SELECT FROM ( TRAVERSE outV("HostingNode") FROM ( SELECT FROM Activates WHERE id = "d3f58e52-5346-47bc-b736-9d77a0b554ce")) WHERE id = "5fbc1a56-d450-4f0f-85c1-9b1684581717"))))) WHERE value = "down")) WHERE propagationConstraint.add = "propagate")))) WHERE name = "data-transfer-service" AND group = "DataTransfer"))))) WHERE endpoint = "http://pc-frosini.isti.cnr.it:8080/data-transfer-service/gcube/service"))
|
|
@ -0,0 +1,41 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("AccessPointFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
TRAVERSE inE("IsIdentifiedBy") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("SoftwareFacet") FROM (
|
||||
TRAVERSE outE("IsIdentifiedBy") FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("StateFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
TRAVERSE inV("EService") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE outE("Activates") FROM (
|
||||
SELECT FROM HostingNode WHERE id = "44fac329-eed5-4f18-90ba-a54d5aad316e"
|
||||
)
|
||||
) WHERE id = "bd89a311-780d-4efe-93e5-08281e53bce7"
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE value = "down"
|
||||
)
|
||||
) WHERE propagationConstraint.add = "propagate"
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE name = "data-transfer-service" AND group = "DataTransfer"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE endpoint = "http://smartexecutor1.dev.int.d4science.net:80/data-transfer-service/gcube/resource"
|
||||
)
|
||||
)
|
||||
) WHERE @class INSTANCEOF "EService"
|
|
@ -0,0 +1,31 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: EService, as: eservice0, where: ($currentMatch['@class'] INSTANCEOF 'EService')}
|
||||
|
||||
.inE('Activates') { as: activates00, where: ($currentMatch['@class'] INSTANCEOF 'Activates')}
|
||||
.outV('HostingNode') { as: hostingnode000, where: ($currentMatch['@class'] INSTANCEOF 'HostingNode')}
|
||||
.outE('ConsistsOf') { as: consistsof0000, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.inV('CPUFacet') { as: cpufacet00000, where: (($currentMatch['@class'] INSTANCEOF 'CPUFacet') AND (vendor = "GenuineIntel"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof0000 == $currentMatch)}
|
||||
.outV('HostingNode') { where: ($matched.hostingnode000 == $currentMatch)}
|
||||
.outE('Activates') { where: ($matched.activates00 == $currentMatch)}
|
||||
.inV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof01, where: (($currentMatch['@class'] INSTANCEOF 'ConsistsOf') AND (propagationConstraint.add = "propagate"))}
|
||||
.inV('StateFacet') { as: statefacet010, where: (($currentMatch['@class'] INSTANCEOF 'StateFacet') AND (value = "down"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof01 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby02, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('SoftwareFacet') { as: softwarefacet020, where: (($currentMatch['@class'] INSTANCEOF 'SoftwareFacet') AND (name = "data-transfer-service" AND group = "DataTransfer"))}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby02 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof03, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.inV('AccessPointFacet') { as: accesspointfacet030, where: (($currentMatch['@class'] INSTANCEOF 'AccessPointFacet') AND (endpoint = "http://pc-frosini.isti.cnr.it:8080/data-transfer-service/gcube/service"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof03 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
RETURN
|
||||
DISTINCT(eservice0) as ret
|
||||
)
|
|
@ -1 +0,0 @@
|
|||
TRAVERSE outV("EService") FROM ( TRAVERSE inE("ConsistsOf") FROM ( SELECT FROM ( TRAVERSE inV("AccessPointFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( TRAVERSE outV("EService") FROM ( TRAVERSE inE("IsIdentifiedBy") FROM ( SELECT FROM ( TRAVERSE inV("SoftwareFacet") FROM ( TRAVERSE outE("IsIdentifiedBy") FROM ( TRAVERSE outV("EService") FROM ( SELECT FROM ( TRAVERSE inE("ConsistsOf") FROM ( SELECT FROM ( TRAVERSE inV("StateFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( TRAVERSE inV("EService") FROM ( TRAVERSE outE("Activates") FROM ( TRAVERSE outV("HostingNode") FROM ( TRAVERSE inE("ConsistsOf") FROM ( SELECT FROM ( TRAVERSE inV("CPUFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( TRAVERSE outV("HostingNode") FROM ( SELECT FROM Activates)))) WHERE vendor = "GenuineIntel"))))))) WHERE value = "down")) WHERE propagationConstraint.add = "propagate")))) WHERE name = "data-transfer-service" AND group = "DataTransfer"))))) WHERE endpoint = "http://pc-frosini.isti.cnr.it:8080/data-transfer-service/gcube/service"))
|
|
@ -0,0 +1,43 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("AccessPointFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
TRAVERSE inE("IsIdentifiedBy") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("SoftwareFacet") FROM (
|
||||
TRAVERSE outE("IsIdentifiedBy") FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("StateFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
TRAVERSE inV("EService") FROM (
|
||||
TRAVERSE outE("Activates") FROM (
|
||||
TRAVERSE outV("HostingNode") FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM CPUFacet WHERE vendor = "GenuineIntel"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE value = "down"
|
||||
)
|
||||
) WHERE propagationConstraint.add = "propagate"
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE name = "data-transfer-service" AND group = "DataTransfer"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE endpoint = "http://pc-frosini.isti.cnr.it:8080/data-transfer-service/gcube/service"
|
||||
)
|
||||
)
|
||||
) WHERE @class INSTANCEOF "EService"
|
|
@ -1,11 +1,11 @@
|
|||
{
|
||||
"type": "StateFacet",
|
||||
"value": "down",
|
||||
"_in": {
|
||||
"_source": {
|
||||
"type": "ConsistsOf",
|
||||
"source" : {
|
||||
"type" : "EService",
|
||||
"id": "0255b7ec-e3da-4071-b456-9a2907ece1db"
|
||||
"id": "93995af0-4f95-4816-a53e-3e1bc27ef475"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,11 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: StateFacet, as: statefacet0, where: (($currentMatch['@class'] INSTANCEOF 'StateFacet') AND (value = "down"))}
|
||||
|
||||
.inE('ConsistsOf') { as: consistsof00, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.outV('EService') { as: eservice000, where: (($currentMatch['@class'] INSTANCEOF 'EService') AND (id = "93995af0-4f95-4816-a53e-3e1bc27ef475"))}
|
||||
.outE('ConsistsOf') { where: ($matched.consistsof00 == $currentMatch)}
|
||||
.inV('StateFacet') { where: ($matched.statefacet0 == $currentMatch)}
|
||||
RETURN
|
||||
DISTINCT(statefacet0) as ret
|
||||
)
|
|
@ -1 +0,0 @@
|
|||
SELECT FROM ( TRAVERSE inV("StateFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( SELECT FROM EService WHERE id = "0255b7ec-e3da-4071-b456-9a2907ece1db"))) WHERE value = "down"
|
|
@ -0,0 +1,7 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE inV("StateFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
SELECT FROM EService WHERE id = "93995af0-4f95-4816-a53e-3e1bc27ef475"
|
||||
)
|
||||
)
|
||||
) WHERE value = "down" AND @class INSTANCEOF "StateFacet"
|
|
@ -1,19 +1,27 @@
|
|||
{
|
||||
"type": "StateFacet",
|
||||
"value": "down",
|
||||
"_in": {
|
||||
"_source": {
|
||||
"type": "ConsistsOf",
|
||||
"source" : {
|
||||
"type" : "EService",
|
||||
"$or": [
|
||||
{"$and": {
|
||||
"id" : "aec0ef31-c735-4a4c-b2f4-57dfbd2fe925",
|
||||
"metadata" :{ "createdBy": {"$ne": "luca.frosini"} }
|
||||
}},
|
||||
{"$and": {
|
||||
"id" : "0255b7ec-e3da-4071-b456-9a2907ece1db",
|
||||
"metadata" : { "createdBy": "DataTransfer:data-transfer-service:pc-frosini.isti.cnr.it_8080" }
|
||||
}}
|
||||
"_or": [
|
||||
{
|
||||
"_and": {
|
||||
"id" : "93995af0-4f95-4816-a53e-3e1bc27ef475",
|
||||
"metadata" : {
|
||||
"createdBy": {"_ne": "luca.frosini"}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"_and": {
|
||||
"id" : "bd4402a0-2b72-41c5-a970-321343649e7d",
|
||||
"metadata" : {
|
||||
"createdBy": "DataTransfer:data-transfer-service:smartexecutor1.dev.int.d4science.net_80"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: StateFacet, as: statefacet0, where: (($currentMatch['@class'] INSTANCEOF 'StateFacet') AND (value = "down"))}
|
||||
|
||||
.inE('ConsistsOf') { as: consistsof00, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.outV('EService') { as: eservice000, where: (($currentMatch['@class'] INSTANCEOF 'EService') AND (((id = "93995af0-4f95-4816-a53e-3e1bc27ef475" AND metadata.createdBy <> "luca.frosini") OR (id = "bd4402a0-2b72-41c5-a970-321343649e7d" AND metadata.createdBy = "DataTransfer:data-transfer-service:smartexecutor1.dev.int.d4science.net_80"))))}
|
||||
.outE('ConsistsOf') { where: ($matched.consistsof00 == $currentMatch)}
|
||||
.inV('StateFacet') { where: ($matched.statefacet0 == $currentMatch)}
|
||||
RETURN
|
||||
DISTINCT(statefacet0) as ret
|
||||
)
|
|
@ -1 +0,0 @@
|
|||
SELECT FROM ( TRAVERSE inV("StateFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( SELECT FROM EService WHERE ((id = "aec0ef31-c735-4a4c-b2f4-57dfbd2fe925" AND metadata.createdBy <> "luca.frosini") OR (id = "0255b7ec-e3da-4071-b456-9a2907ece1db" AND metadata.createdBy = "DataTransfer:data-transfer-service:pc-frosini.isti.cnr.it_8080"))))) WHERE value = "down"
|
|
@ -0,0 +1,11 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE inV("StateFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
SELECT FROM EService WHERE (
|
||||
(id = "93995af0-4f95-4816-a53e-3e1bc27ef475" AND metadata.createdBy <> "luca.frosini")
|
||||
OR
|
||||
(id = "bd4402a0-2b72-41c5-a970-321343649e7d" AND metadata.createdBy = "DataTransfer:data-transfer-service:smartexecutor1.dev.int.d4science.net_80")
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE value = "down" AND @class INSTANCEOF "StateFacet"
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"type": "EService",
|
||||
"id": "0255b7ec-e3da-4071-b456-9a2907ece1db",
|
||||
"id": "93995af0-4f95-4816-a53e-3e1bc27ef475",
|
||||
"consistsOf": [
|
||||
{
|
||||
"type": "ConsistsOf",
|
||||
|
@ -24,17 +24,17 @@
|
|||
"type": "ConsistsOf",
|
||||
"target": {
|
||||
"type": "AccessPointFacet",
|
||||
"endpoint": "http://pc-frosini.isti.cnr.it:8080/data-transfer-service/gcube/service"
|
||||
"endpoint": "http://smartexecutor1.dev.int.d4science.net:80/data-transfer-service/gcube/resource"
|
||||
}
|
||||
}
|
||||
],
|
||||
"isRelatedTo" : [
|
||||
{
|
||||
"type": "Activates",
|
||||
"id": "d3f58e52-5346-47bc-b736-9d77a0b554ce",
|
||||
"id": "bd89a311-780d-4efe-93e5-08281e53bce7",
|
||||
"source": {
|
||||
"type": "HostingNode",
|
||||
"id" : "5fbc1a56-d450-4f0f-85c1-9b1684581717",
|
||||
"id" : "44fac329-eed5-4f18-90ba-a54d5aad316e",
|
||||
"consistsOf": [
|
||||
{
|
||||
"type": "ConsistsOf",
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: EService, as: eservice0, where: (($currentMatch['@class'] INSTANCEOF 'EService') AND (id = "93995af0-4f95-4816-a53e-3e1bc27ef475"))}
|
||||
|
||||
.inE('Activates') { as: activates00, where: (($currentMatch['@class'] INSTANCEOF 'Activates') AND (id = "bd89a311-780d-4efe-93e5-08281e53bce7"))}
|
||||
.outV('HostingNode') { as: hostingnode000, where: (($currentMatch['@class'] INSTANCEOF 'HostingNode') AND (id = "44fac329-eed5-4f18-90ba-a54d5aad316e"))}
|
||||
.outE('ConsistsOf') { as: consistsof0000, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.inV('CPUFacet') { as: cpufacet00000, where: (($currentMatch['@class'] INSTANCEOF 'CPUFacet') AND (vendor = "GenuineIntel"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof0000 == $currentMatch)}
|
||||
.outV('HostingNode') { where: ($matched.hostingnode000 == $currentMatch)}
|
||||
.outE('Activates') { where: ($matched.activates00 == $currentMatch)}
|
||||
.inV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof01, where: (($currentMatch['@class'] INSTANCEOF 'ConsistsOf') AND (propagationConstraint.add = "propagate"))}
|
||||
.inV('StateFacet') { as: statefacet010, where: (($currentMatch['@class'] INSTANCEOF 'StateFacet') AND (value = "down"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof01 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby02, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('SoftwareFacet') { as: softwarefacet020, where: (($currentMatch['@class'] INSTANCEOF 'SoftwareFacet') AND (name = "data-transfer-service" AND group = "DataTransfer"))}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby02 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
.outE('ConsistsOf') { as: consistsof03, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.inV('AccessPointFacet') { as: accesspointfacet030, where: (($currentMatch['@class'] INSTANCEOF 'AccessPointFacet') AND (endpoint = "http://smartexecutor1.dev.int.d4science.net:80/data-transfer-service/gcube/resource"))}
|
||||
.inE('ConsistsOf') { where: ($matched.consistsof03 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice0 == $currentMatch)}
|
||||
|
||||
RETURN
|
||||
DISTINCT(eservice0) as ret
|
||||
)
|
|
@ -1 +0,0 @@
|
|||
SELECT FROM ( TRAVERSE outV("EService") FROM ( TRAVERSE inE("ConsistsOf") FROM ( SELECT FROM ( TRAVERSE inV("AccessPointFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( TRAVERSE outV("EService") FROM ( TRAVERSE inE("IsIdentifiedBy") FROM ( SELECT FROM ( TRAVERSE inV("SoftwareFacet") FROM ( TRAVERSE outE("IsIdentifiedBy") FROM ( TRAVERSE outV("EService") FROM ( SELECT FROM ( TRAVERSE inE("ConsistsOf") FROM ( SELECT FROM ( TRAVERSE inV("StateFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( TRAVERSE inV("EService") FROM ( TRAVERSE outE("Activates") FROM ( SELECT FROM ( TRAVERSE outV("HostingNode") FROM ( TRAVERSE inE("ConsistsOf") FROM ( SELECT FROM ( TRAVERSE inV("CPUFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( TRAVERSE outV("HostingNode") FROM ( SELECT FROM Activates WHERE id = "d3f58e52-5346-47bc-b736-9d77a0b554ce")))) WHERE vendor = "GenuineIntel"))) WHERE id = "5fbc1a56-d450-4f0f-85c1-9b1684581717"))))) WHERE value = "down")) WHERE propagationConstraint.add = "propagate")))) WHERE name = "data-transfer-service" AND group = "DataTransfer"))))) WHERE endpoint = "http://pc-frosini.isti.cnr.it:8080/data-transfer-service/gcube/service"))) WHERE id = "0255b7ec-e3da-4071-b456-9a2907ece1db"
|
|
@ -0,0 +1,47 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("AccessPointFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
TRAVERSE inE("IsIdentifiedBy") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("SoftwareFacet") FROM (
|
||||
TRAVERSE outE("IsIdentifiedBy") FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("StateFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
TRAVERSE inV("EService") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE outE("Activates") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE outV("HostingNode") FROM (
|
||||
TRAVERSE inE("ConsistsOf") FROM (
|
||||
SELECT FROM CPUFacet WHERE vendor = "GenuineIntel"
|
||||
)
|
||||
)
|
||||
) WHERE id = "44fac329-eed5-4f18-90ba-a54d5aad316e"
|
||||
)
|
||||
) WHERE id = "bd89a311-780d-4efe-93e5-08281e53bce7"
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE value = "down"
|
||||
)
|
||||
) WHERE propagationConstraint.add = "propagate"
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE name = "data-transfer-service" AND group = "DataTransfer"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE endpoint = "http://smartexecutor1.dev.int.d4science.net:80/data-transfer-service/gcube/resource"
|
||||
)
|
||||
)
|
||||
) WHERE id = "93995af0-4f95-4816-a53e-3e1bc27ef475" AND @class INSTANCEOF "EService"
|
|
@ -1,18 +1,26 @@
|
|||
{
|
||||
"type": "StateFacet",
|
||||
"_in": {
|
||||
"_source": {
|
||||
"type": "ConsistsOf",
|
||||
"source" : {
|
||||
"type" : "EService",
|
||||
"$or": [
|
||||
{"$and": {
|
||||
"id" : "aec0ef31-c735-4a4c-b2f4-57dfbd2fe925",
|
||||
"metadata" :{ "createdBy": {"$ne": "luca.frosini"} }
|
||||
}},
|
||||
{"$and": {
|
||||
"id" : "0255b7ec-e3da-4071-b456-9a2907ece1db",
|
||||
"metadata" : { "createdBy": "DataTransfer:data-transfer-service:pc-frosini.isti.cnr.it_8080" }
|
||||
}}
|
||||
"_or": [
|
||||
{
|
||||
"_and": {
|
||||
"id" : "93995af0-4f95-4816-a53e-3e1bc27ef475",
|
||||
"metadata" : {
|
||||
"createdBy": {"_ne": "luca.frosini"}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"_and": {
|
||||
"id" : "bd4402a0-2b72-41c5-a970-321343649e7d",
|
||||
"metadata" : {
|
||||
"createdBy": "DataTransfer:data-transfer-service:smartexecutor1.dev.int.d4science.net_80"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: StateFacet, as: statefacet0, where: ($currentMatch['@class'] INSTANCEOF 'StateFacet')}
|
||||
|
||||
.inE('ConsistsOf') { as: consistsof00, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.outV('EService') { as: eservice000, where: (($currentMatch['@class'] INSTANCEOF 'EService') AND (((id = "93995af0-4f95-4816-a53e-3e1bc27ef475" AND metadata.createdBy <> "luca.frosini") OR (id = "bd4402a0-2b72-41c5-a970-321343649e7d" AND metadata.createdBy = "DataTransfer:data-transfer-service:smartexecutor1.dev.int.d4science.net_80"))))}
|
||||
.outE('ConsistsOf') { where: ($matched.consistsof00 == $currentMatch)}
|
||||
.inV('StateFacet') { where: ($matched.statefacet0 == $currentMatch)}
|
||||
RETURN
|
||||
DISTINCT(statefacet0) as ret
|
||||
)
|
|
@ -1 +0,0 @@
|
|||
SELECT FROM ( TRAVERSE inV("StateFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( SELECT FROM EService WHERE ((id = "aec0ef31-c735-4a4c-b2f4-57dfbd2fe925" AND metadata.createdBy <> "luca.frosini") OR (id = "0255b7ec-e3da-4071-b456-9a2907ece1db" AND metadata.createdBy = "DataTransfer:data-transfer-service:pc-frosini.isti.cnr.it_8080")))))
|
|
@ -0,0 +1,11 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE inV("StateFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
SELECT FROM EService
|
||||
WHERE (
|
||||
(id = "93995af0-4f95-4816-a53e-3e1bc27ef475" AND metadata.createdBy <> "luca.frosini") OR
|
||||
(id = "bd4402a0-2b72-41c5-a970-321343649e7d" AND metadata.createdBy = "DataTransfer:data-transfer-service:smartexecutor1.dev.int.d4science.net_80")
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE @class INSTANCEOF "StateFacet"
|
|
@ -0,0 +1,21 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: CallsFor, as: callsfor0, where: ($currentMatch['@class'] INSTANCEOF 'CallsFor')}
|
||||
|
||||
.inV('VirtualService') { as: virtualservice00, where: ($currentMatch['@class'] INSTANCEOF 'VirtualService')}
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby000, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('SoftwareFacet') { as: softwarefacet0000, where: (($currentMatch['@class'] INSTANCEOF 'SoftwareFacet') AND (group = "org.gcube.data-catalogue" AND name = "catalogue-virtual-service"))}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby000 == $currentMatch)}
|
||||
.outV('VirtualService') { where: ($matched.virtualservice00 == $currentMatch)}
|
||||
.inE('CallsFor') { where: ($matched.callsfor0 == $currentMatch)}
|
||||
|
||||
.outV('EService') { as: eservice01, where: ($currentMatch['@class'] INSTANCEOF 'EService')}
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby010, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('SoftwareFacet') { as: softwarefacet0100, where: (($currentMatch['@class'] INSTANCEOF 'SoftwareFacet') AND (group = "org.gcube.data-catalogue" AND name = "gcat"))}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby010 == $currentMatch)}
|
||||
.outV('EService') { where: ($matched.eservice01 == $currentMatch)}
|
||||
.outE('CallsFor') { where: ($matched.callsfor0 == $currentMatch)}
|
||||
|
||||
RETURN
|
||||
DISTINCT(callsfor0) as ret
|
||||
)
|
|
@ -1 +0,0 @@
|
|||
TRAVERSE inE("CallsFor") FROM ( TRAVERSE outV("VirtualService") FROM ( TRAVERSE inE("IsIdentifiedBy") FROM ( SELECT FROM ( TRAVERSE inV("SoftwareFacet") FROM ( TRAVERSE outE("IsIdentifiedBy") FROM ( TRAVERSE inV("VirtualService") FROM ( TRAVERSE outE("CallsFor") FROM ( TRAVERSE outV("EService") FROM ( TRAVERSE inE("IsIdentifiedBy") FROM ( SELECT FROM ( TRAVERSE inV("SoftwareFacet") FROM ( TRAVERSE outE("IsIdentifiedBy") FROM ( TRAVERSE outV("EService") FROM ( SELECT FROM CallsFor)))) WHERE group = "org.gcube.data-catalogue" AND name = "gcat"))))))) WHERE group = "org.gcube.data-catalogue" AND name = "catalogue-virtual-service")))
|
|
@ -0,0 +1,23 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE inE("CallsFor") FROM (
|
||||
TRAVERSE outV("VirtualService") FROM (
|
||||
TRAVERSE inE("IsIdentifiedBy") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("SoftwareFacet") FROM (
|
||||
TRAVERSE outE("IsIdentifiedBy") FROM (
|
||||
TRAVERSE inV("VirtualService") FROM (
|
||||
TRAVERSE outE("CallsFor") FROM (
|
||||
TRAVERSE outV("EService") FROM (
|
||||
TRAVERSE inE("IsIdentifiedBy") FROM (
|
||||
SELECT FROM SoftwareFacet WHERE group = "org.gcube.data-catalogue" AND name = "gcat"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE group = "org.gcube.data-catalogue" AND name = "catalogue-virtual-service"
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE @class INSTANCEOF "CallsFor"
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"type": "SimpleFacet",
|
||||
"_in": {
|
||||
"_source": {
|
||||
"type": "ConsistsOf",
|
||||
"source": {
|
||||
"type": "Configuration",
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
SELECT EXPAND(ret) FROM (
|
||||
MATCH
|
||||
{class: SimpleFacet, as: simplefacet0, where: ($currentMatch['@class'] INSTANCEOF 'SimpleFacet')}
|
||||
|
||||
.inE('ConsistsOf') { as: consistsof00, where: ($currentMatch['@class'] INSTANCEOF 'ConsistsOf')}
|
||||
.outV('Configuration') { as: configuration000, where: ($currentMatch['@class'] INSTANCEOF 'Configuration')}
|
||||
.inE('IsCustomizedBy') { as: iscustomizedby0000, where: ($currentMatch['@class'] INSTANCEOF 'IsCustomizedBy')}
|
||||
.outV('VirtualService') { as: virtualservice00000, where: ($currentMatch['@class'] INSTANCEOF 'VirtualService')}
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby000000, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('SoftwareFacet') { as: softwarefacet0000000, where: (($currentMatch['@class'] INSTANCEOF 'SoftwareFacet') AND (group = "org.gcube.data-catalogue" AND name = "catalogue-virtual-service"))}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby000000 == $currentMatch)}
|
||||
.outV('VirtualService') { where: ($matched.virtualservice00000 == $currentMatch)}
|
||||
.outE('IsCustomizedBy') { where: ($matched.iscustomizedby0000 == $currentMatch)}
|
||||
.inV('Configuration') { where: ($matched.configuration000 == $currentMatch)}
|
||||
.outE('IsIdentifiedBy') { as: isidentifiedby0001, where: ($currentMatch['@class'] INSTANCEOF 'IsIdentifiedBy')}
|
||||
.inV('IdentifierFacet') { as: identifierfacet00010, where: (($currentMatch['@class'] INSTANCEOF 'IdentifierFacet') AND (value = "gcat-configuration"))}
|
||||
.inE('IsIdentifiedBy') { where: ($matched.isidentifiedby0001 == $currentMatch)}
|
||||
.outV('Configuration') { where: ($matched.configuration000 == $currentMatch)}
|
||||
.outE('ConsistsOf') { where: ($matched.consistsof00 == $currentMatch)}
|
||||
.inV('SimpleFacet') { where: ($matched.simplefacet0 == $currentMatch)}
|
||||
RETURN
|
||||
DISTINCT(simplefacet0) as ret
|
||||
)
|
|
@ -1 +0,0 @@
|
|||
SELECT FROM ( TRAVERSE inV("SimpleFacet") FROM ( TRAVERSE outE("ConsistsOf") FROM ( TRAVERSE outV("Configuration") FROM ( TRAVERSE inE("IsIdentifiedBy") FROM ( SELECT FROM ( TRAVERSE inV("IdentifierFacet") FROM ( TRAVERSE outE("IsIdentifiedBy") FROM ( TRAVERSE inV("Configuration") FROM ( TRAVERSE outE("IsCustomizedBy") FROM ( TRAVERSE outV("VirtualService") FROM ( TRAVERSE inE("IsIdentifiedBy") FROM ( SELECT FROM ( TRAVERSE inV("SoftwareFacet") FROM ( TRAVERSE outE("IsIdentifiedBy") FROM ( TRAVERSE outV("VirtualService") FROM ( SELECT FROM IsCustomizedBy)))) WHERE group = "org.gcube.data-catalogue" AND name = "catalogue-virtual-service"))))))) WHERE value = "gcat-configuration")))))
|
|
@ -0,0 +1,25 @@
|
|||
SELECT FROM (
|
||||
TRAVERSE inV("SimpleFacet") FROM (
|
||||
TRAVERSE outE("ConsistsOf") FROM (
|
||||
TRAVERSE outV("Configuration") FROM (
|
||||
TRAVERSE inE("IsIdentifiedBy") FROM (
|
||||
SELECT FROM (
|
||||
TRAVERSE inV("IdentifierFacet") FROM (
|
||||
TRAVERSE outE("IsIdentifiedBy") FROM (
|
||||
TRAVERSE inV("Configuration") FROM (
|
||||
TRAVERSE outE("IsCustomizedBy") FROM (
|
||||
TRAVERSE outV("VirtualService") FROM (
|
||||
TRAVERSE inE("IsIdentifiedBy") FROM (
|
||||
SELECT FROM SoftwareFacet WHERE group = "org.gcube.data-catalogue" AND name = "catalogue-virtual-service"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE value = "gcat-configuration"
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
) WHERE @class INSTANCEOF "SimpleFacet"
|
Loading…
Reference in New Issue