Datastax Driver Fully Implemented - Partially Tested - return of deprecated methods

Ahmed Salah Tawfik Ibrahim 2023-10-30 18:52:02 +01:00
parent 6465e88378
commit 3bd81a415f
6 changed files with 2194 additions and 1706 deletions

CHANGELOG.md

@@ -4,9 +4,9 @@
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v1.18.0] - 2022-09-20
## [v2.0.0] - 2023-10-30
- Feature #23891: the (wrongly named) Feed class has been renamed to Post; all methods have been updated accordingly and the old ones marked as deprecated (see the sketch below)
- Feature #25901-fix: same as feature #25901, with minor fixes related to deprecated methods and classes
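
The deprecation pattern referred to above can be illustrated with a minimal, hypothetical sketch; the class and method names below are illustrative only and are not taken from the library's actual API:

```java
// Hypothetical sketch of the Feed -> Post rename with backward-compatible,
// deprecated delegates; names are illustrative, not the library's real API.
public class DeprecationSketch {

    /** Renamed from the (wrongly named) Feed class. */
    public static class Post {
        private final String id;
        public Post(String id) { this.id = id; }
        public String getId() { return id; }
    }

    /** New Post-based method. */
    public Post readPost(String postId) {
        return new Post(postId); // placeholder for the real lookup
    }

    /** Old Feed-based method, kept and marked deprecated; it simply delegates. */
    @Deprecated
    public Post readFeed(String feedId) {
        return readPost(feedId);
    }
}
```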
## [v1.17.0] - 2022-05-13

pom.xml

@@ -1,6 +1,6 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>maven-parent</artifactId>
@@ -11,7 +11,7 @@
<groupId>org.gcube.portal</groupId>
<artifactId>social-networking-library</artifactId>
<version>1.18.0</version>
<version>2.0.0-SNAPSHOT</version>
<name>gCube Social Networking Library</name>
<description>
The gCube Social Networking Library is the 'bridge' between your gCube Applications and the social networking facilities.
@@ -28,57 +28,39 @@
<gwtVersion>2.8.1</gwtVersion>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<cassandra.driver.oss.version>4.13.0</cassandra.driver.oss.version>
<logback.version>1.2.3</logback.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>maven-portal-bom</artifactId>
<version>3.6.4</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-query-builder</artifactId>
<version>${cassandra.driver.oss.version}</version>
</dependency>
<dependency>
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-mapper-runtime</artifactId>
<version>${cassandra.driver.oss.version}</version>
</dependency>
<dependency>
<groupId>com.google</groupId>
<artifactId>gwt-jsonmaker</artifactId>
</dependency>
<dependency>
<groupId>com.netflix.astyanax</groupId>
<artifactId>astyanax-core</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.netflix.astyanax</groupId>
<artifactId>astyanax-thrift</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.netflix.astyanax</groupId>
<artifactId>astyanax-cassandra</artifactId>
<scope>provided</scope>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>org.gcube.resources.discovery</groupId>
<artifactId>ic-client</artifactId>
<version>[1.0.5-SNAPSHOT,2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.common.portal</groupId>
<artifactId>portal-manager</artifactId>
<scope>provided</scope>
<version>[2.4.2-SNAPSHOT,3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>com.sun.mail</groupId>
<artifactId>javax.mail</artifactId>
<scope>provided</scope>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>junit</groupId>
@@ -89,15 +71,21 @@
<groupId>com.google.gwt</groupId>
<artifactId>gwt-user</artifactId>
<version>${gwtVersion}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.6.4</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.6.4</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
</dependency>
</dependencies>
<build>
@@ -135,6 +123,31 @@
<skipTests>true</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.4.2</version>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<mainClass>org.gcube.portal.databook.server.Tester</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
<executions>
<execution>
<id>assemble-all</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>

CassandraClusterConnection.java

@@ -1,33 +1,25 @@
package org.gcube.portal.databook.server;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.net.InetSocketAddress;
import java.time.Duration;
import java.util.List;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.CqlSessionBuilder;
import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
import com.datastax.oss.driver.api.core.config.DriverConfigLoader;
import com.datastax.oss.driver.api.core.cql.ResultSet;
import com.datastax.oss.driver.api.core.metadata.Metadata;
import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.querybuilder.SchemaBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.ddl.ColumnDefinition;
import com.netflix.astyanax.ddl.ColumnFamilyDefinition;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.ddl.SchemaChangeResult;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.thrift.ddl.ThriftColumnDefinitionImpl;
import com.netflix.astyanax.thrift.ddl.ThriftColumnFamilyDefinitionImpl;
/**
* @author Massimiliano Assante ISTI-CNR
* @author Costantino Perciante ISTI-CNR
* @author Ahmed Salah Tawfik Ibrahim ISTI-CNR
*
*/
public class CassandraClusterConnection {
@@ -39,304 +31,473 @@ public class CassandraClusterConnection {
/**
* keyspace location
*/
private static String clusterName;
private static String host;
private static List<InetSocketAddress> hosts;
private static String datacenterName;
private static String keyspaceName;
private CqlSession myKeyspaceSession;
private Keyspace myKeyspace;
/**
*
* @param dropSchema set true if you want to drop the current and set up new one
* the connection to cassandra cluster
*/
protected CassandraClusterConnection(boolean dropSchema) {
if (clusterName == null || host == null || keyspaceName == null) {
if (hosts == null || datacenterName == null || keyspaceName == null) {
RunningCluster cluster = RunningCluster.getInstance(null);
clusterName = cluster.getClusterName();
host = cluster.getHost();
//host = cluster.getHost();
hosts = cluster.getHosts();
datacenterName = cluster.getDatacenterName();
keyspaceName = cluster.getKeyspaceName();
}
AstyanaxContext<Cluster> clusterContext = new AstyanaxContext.Builder()
.forCluster(clusterName)
.withAstyanaxConfiguration(new AstyanaxConfigurationImpl())
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl(
clusterName).setMaxConnsPerHost(100)
.setSeeds(host))
.withConnectionPoolMonitor(
new CountingConnectionPoolMonitor())
.buildCluster(ThriftFamilyFactory.getInstance());
_log.info(keyspaceName + " KeySpace SetUp ...");
SetUpKeySpaces(clusterContext, dropSchema);
SetUpKeySpaces(dropSchema);
myKeyspaceSession = connect(keyspaceName);
_log.info("CONNECTED! using KeySpace: " + keyspaceName);
// then close connection pool for cluster
_log.info("Closing cluster connection pool no longer needed (keyspace one will be used)");
clusterContext.shutdown();
_log.info("Closed cluster connection pool no longer needed (keyspace one will be used)");
}
/**
* Close the connection pool
*/
public void closeConnection(){
if(myKeyspace != null){
try{
_log.info("Closing pool connection");
myKeyspace.getConnectionPool().shutdown();
_log.info("Pool closed!");
}catch(Exception e){
_log.error("Unable to close connection pool", e);
}
}
}
/**
*
* @param dropSchema set true if you want do drop the current and set up new one
* @param dropSchema set true if you want to drop the current and set up new one
* the connection to cassandra cluster
*/
protected CassandraClusterConnection(boolean dropSchema, String infrastructureName) {
if (clusterName == null || host == null || keyspaceName == null) {
if (hosts == null || datacenterName == null || keyspaceName == null) {
RunningCluster cluster = RunningCluster.getInstance(infrastructureName);
clusterName = cluster.getClusterName();
host = cluster.getHost();
//host = cluster.getHost();
hosts = cluster.getHosts();
datacenterName = cluster.getDatacenterName();
keyspaceName = cluster.getKeyspaceName();
}
AstyanaxContext<Cluster> clusterContext = new AstyanaxContext.Builder()
.forCluster(clusterName)
.withAstyanaxConfiguration(new AstyanaxConfigurationImpl())
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl(
clusterName).setMaxConnsPerHost(100)
.setSeeds(host))
.withConnectionPoolMonitor(
new CountingConnectionPoolMonitor())
.buildCluster(ThriftFamilyFactory.getInstance());
_log.info(keyspaceName + " KeySpace SetUp ...");
SetUpKeySpaces(clusterContext, dropSchema);
SetUpKeySpaces(dropSchema);
myKeyspaceSession = connect(keyspaceName);
_log.info("CONNECTED! using KeySpace: " + keyspaceName);
}
// then close connection pool for cluster
_log.info("Closing cluster connection pool no longer needed (keyspace one will be used)");
clusterContext.shutdown();
_log.info("Closed cluster connection pool no longer needed (keyspace one will be used)");
public CqlSession getKeyspaceSession(){
if (myKeyspaceSession.isClosed()){
myKeyspaceSession = connect(keyspaceName);
}
return myKeyspaceSession;
}
/**
* Get the reference to the current keyspace
* @return keyspace reference
* @param dropSchema set true if you want to drop the current and set up new one
* the connection to cassandra cluster
*/
public Keyspace getKeyspace() {
// The Keyspace instance can be shared among different requests
if(myKeyspace == null){
synchronized(this){
if(myKeyspace == null){ // double checked lock
AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder()
.forCluster(clusterName)
.forKeyspace(keyspaceName)
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(NodeDiscoveryType.NONE) // use only the host given as seeds (do not discover)
.setConnectionPoolType(ConnectionPoolType.ROUND_ROBIN) // how to handle connections of the the connection pool
)
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl("MyConnectionPool")
.setMaxConnsPerHost(3) // for each seed(host)
.setSocketTimeout(2000) //-> default: 11 seconds
//.setConnectTimeout(1000) -> default: 2 seconds
.setSeeds(host)
)
.withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
.buildKeyspace(ThriftFamilyFactory.getInstance());
context.start();
// save keyspace reference
myKeyspace = context.getEntity();
}
public void SetUpKeySpaces(boolean dropSchema) {
boolean createNew = false;
boolean found = false;
CqlSession session = connect();
Metadata metaData = session.getMetadata();
for (KeyspaceMetadata meta : metaData.getKeyspaces().values()) {
if (meta.getName().toString().equals(keyspaceName)){
found = true;
break;
}
}
return myKeyspace;
}
/**
*
* @param clusterContext
* @param dropSchema
* @throws ConnectionException
*/
public void SetUpKeySpaces(AstyanaxContext<Cluster> clusterContext, boolean dropSchema) {
boolean createNew = false;
clusterContext.start();
try {
Cluster cluster = clusterContext.getEntity();
KeyspaceDefinition keyspaceDef = cluster.describeKeyspace(keyspaceName);
if (dropSchema && keyspaceDef != null) {
if (dropSchema && found) {
_log.info("Dropping Keyspace: " + keyspaceName + " ...");
try {
OperationResult<SchemaChangeResult> returned = cluster.dropKeyspace(keyspaceName);
ResultSet returned = dropKeyspace();
Thread.sleep(2000);
_log.info("Dropped " + returned.getResult().toString());
} catch (ConnectionException e) {
if (returned.wasApplied())
_log.info("Dropped " + keyspaceName);
else
_log.info("Couldn't drop " + keyspaceName);
} catch (Exception e) {
_log.error("Dropping Keyspace operation Failed ... " + keyspaceName + " does NOT exists");
return;
} catch (InterruptedException e) {
e.printStackTrace();
}
createNew = true;
}
keyspaceDef = cluster.makeKeyspaceDefinition();
keyspaceDef = cluster.describeKeyspace(keyspaceName);
if (keyspaceDef == null || keyspaceDef.getName() == null || createNew) {
if (!found || createNew) {
_log.info("Keyspace does not exist, triggering schema creation ... ");
createSchema(cluster);
_log.info("Cluster " + clusterName + " on " + host + " Initialized OK!");
int replicationFactor = 2;
createKeyspace(keyspaceName, replicationFactor);
closeSession(session);
createTables();
_log.info("Using Keyspace " + keyspaceName);
}
} catch (ConnectionException e) {
} catch (Exception e) {
e.printStackTrace();
}
}
/*
*
********************** CASSANDRA KEYSPACE CREATION ***********************
*
*/
/**
* create the databook schema
* @return
* @throws ConnectionException
*/
private void createSchema(Cluster cluster) throws ConnectionException {
Map<String, String> stratOptions = new HashMap<String, String>();
stratOptions.put("replication_factor", "1");
KeyspaceDefinition ksDef = cluster.makeKeyspaceDefinition();
//get static column families with secondary indexes
/**
* define Notifications CF with Type as secondary index
*/
ColumnFamilyDefinition cfDefNotifications = getStaticCFDef(DBCassandraAstyanaxImpl.NOTIFICATIONS, "Type");
/**
* define Feeds CF with Privacy as secondary index
*/
ColumnFamilyDefinition cfDefFeeds = getStaticCFDef(DBCassandraAstyanaxImpl.FEEDS, "Privacy");
/**
* define Comments CF with FeedId as secondary index
*/
ColumnFamilyDefinition cfDefComments = getStaticCFDef(DBCassandraAstyanaxImpl.COMMENTS, "Feedid");
/**
* define Likes CF with FeedId as secondary index
*/
ColumnFamilyDefinition cfDefLikes = getStaticCFDef(DBCassandraAstyanaxImpl.LIKES, "Feedid");
/**
* define Invites CF with SenderUserId as secondary index
*/
ColumnFamilyDefinition cfDefInvites = getStaticCFDef(DBCassandraAstyanaxImpl.INVITES, "SenderUserId");
/**
* define Attachments CF with FeedId as secondary index
*/
ColumnFamilyDefinition cfDefAttachments = getStaticCFDef(DBCassandraAstyanaxImpl.ATTACHMENTS, "feedId");
//get dynamic column families, act as auxiliary indexes
ColumnFamilyDefinition cfDefConn = getDynamicCFDef(DBCassandraAstyanaxImpl.CONNECTIONS);
ColumnFamilyDefinition cfDefPendingConn = getDynamicCFDef(DBCassandraAstyanaxImpl.PENDING_CONNECTIONS_CF_NAME);
ColumnFamilyDefinition cfDefVRETimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.VRE_TIMELINE_FEEDS);
ColumnFamilyDefinition cfDefAPPTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.APP_TIMELINE_FEEDS);
ColumnFamilyDefinition cfDefUserTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_TIMELINE_FEEDS);
ColumnFamilyDefinition cfDefUserLikedFeeds = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_LIKED_FEEDS);
ColumnFamilyDefinition cfDefUserNotifications = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_NOTIFICATIONS);
ColumnFamilyDefinition cfDefUserNotificationsUnread = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_NOTIFICATIONS_UNREAD);
ColumnFamilyDefinition cfDefUserNotificationsPreferences = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_NOTIFICATIONS_PREFERENCES);
ColumnFamilyDefinition cfDefEmailInvitesTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.EMAIL_INVITES);
ColumnFamilyDefinition cfDefVREInvitesTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.VRE_INVITES);
ColumnFamilyDefinition cfDefHashtagsCounter = getDynamicCFDef(DBCassandraAstyanaxImpl.HASHTAGS_COUNTER);
ColumnFamilyDefinition cfDefHashtagTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.HASHTAGGED_FEEDS);
ColumnFamilyDefinition cfDefHashtagCommentsTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.HASHTAGGED_COMMENTS);
ksDef.setName(keyspaceName)
.setStrategyOptions(stratOptions)
.setStrategyClass("SimpleStrategy")
.addColumnFamily(cfDefNotifications)
.addColumnFamily(cfDefFeeds)
.addColumnFamily(cfDefComments)
.addColumnFamily(cfDefLikes)
.addColumnFamily(cfDefInvites)
.addColumnFamily(cfDefAttachments)
.addColumnFamily(cfDefConn)
.addColumnFamily(cfDefPendingConn)
.addColumnFamily(cfDefVRETimeline)
.addColumnFamily(cfDefAPPTimeline)
.addColumnFamily(cfDefUserTimeline)
.addColumnFamily(cfDefUserNotifications)
.addColumnFamily(cfDefUserNotificationsUnread)
.addColumnFamily(cfDefUserNotificationsPreferences)
.addColumnFamily(cfDefUserLikedFeeds)
.addColumnFamily(cfDefEmailInvitesTimeline)
.addColumnFamily(cfDefVREInvitesTimeline)
.addColumnFamily(cfDefHashtagsCounter)
.addColumnFamily(cfDefHashtagTimeline)
.addColumnFamily(cfDefHashtagCommentsTimeline);
cluster.addKeyspace(ksDef);
private static CqlSession connect() {
CqlSession cqlSession = configBuilder(CqlSession.builder())
.addContactPoints(hosts)
.withLocalDatacenter(datacenterName)
.build();
_log.info("[OK] Connected to Cassandra Cluster");
return cqlSession;
}
private static CqlSession connect(String KEYSPACE_NAME) {
CqlSession cqlSession = configBuilder(CqlSession.builder())
.addContactPoints(hosts)
.withKeyspace(KEYSPACE_NAME)
.withLocalDatacenter("1")
.build();
_log.info("[OK] Connected to Keyspace {} ", KEYSPACE_NAME);
return cqlSession;
}
/**
* create a dynamic column family to be added in a keyspace
*
* @param cfName the CF name
* @return the instance to be added to the keyspace
*/
private ColumnFamilyDefinition getDynamicCFDef(String cfName) {
ColumnFamilyDefinition columnFamilyDefinition = new ThriftColumnFamilyDefinitionImpl();
columnFamilyDefinition.setName(cfName);
columnFamilyDefinition.setKeyValidationClass("UTF8Type");
columnFamilyDefinition.setComparatorType("UTF8Type");
return columnFamilyDefinition;
public static void closeSession(CqlSession session) {
if (session != null) session.close();
_log.info("[OK]Session is now closed");
}
/**
* create a static column family to be added in a keyspace with possibility to add a secondary index for a given column
*
* @param cfName the CF name
* @param secondaryIndexedField the column name of the column to index
* @return the instance to be added to the keyspace
*/
private ColumnFamilyDefinition getStaticCFDef(String cfName, String secondaryIndexedField) {
ColumnFamilyDefinition columnFamilyDefinition = new ThriftColumnFamilyDefinitionImpl();
columnFamilyDefinition.setName(cfName);
columnFamilyDefinition.setKeyValidationClass("UTF8Type");
columnFamilyDefinition.setComparatorType("UTF8Type");
//Add secondary index for userid
ColumnDefinition typeCDef = new ThriftColumnDefinitionImpl();
typeCDef.setName(secondaryIndexedField)
.setValidationClass("UTF8Type");
typeCDef.setIndex(secondaryIndexedField+"_"+UUID.randomUUID().toString().substring(0,5), "KEYS");
columnFamilyDefinition.addColumnDefinition(typeCDef);
return columnFamilyDefinition;
public void closeConnection(){
if(!myKeyspaceSession.isClosed()){
try{
_log.info("Closing connection");
closeSession(myKeyspaceSession);
_log.info("Connection closed!");
}catch(Exception e){
_log.error("Unable to close connection", e);
}
}
}
private static CqlSessionBuilder configBuilder(CqlSessionBuilder cqlSessionBuilder){
return cqlSessionBuilder
.withConfigLoader(DriverConfigLoader.programmaticBuilder()
// Resolves the timeout: query 'SELECT * FROM system_schema.tables' timed out after PT2S
.withDuration(DefaultDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT, Duration.ofMillis(240000))
.withDuration(DefaultDriverOption.CONNECTION_INIT_QUERY_TIMEOUT, Duration.ofMillis(240000))
.withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofMillis(240000))
.build());
}
private static void createKeyspace(String keyspaceName, int replicationFactor) {
try (CqlSession cqlSession = configBuilder(CqlSession.builder())
.addContactPoints(hosts)
.withLocalDatacenter("1")
.build()) {
cqlSession.execute(SchemaBuilder.createKeyspace(keyspaceName)
.ifNotExists()
.withSimpleStrategy(replicationFactor)
.withDurableWrites(true)
.build());
_log.info("+ Keyspace '{}' created.", keyspaceName);
closeSession(cqlSession);
}
}
private static ResultSet dropKeyspace(){
ResultSet toreturn;
try (CqlSession cqlSession = configBuilder(CqlSession.builder())
.addContactPoints(hosts)
.withLocalDatacenter("1")
.build()) {
toreturn = cqlSession.execute(SchemaBuilder.dropKeyspace(keyspaceName).ifExists().build());
_log.info("Keyspace {} dropped.", keyspaceName);
closeSession(cqlSession);
}
return toreturn;
}
private void createTables(){
try (CqlSession cqlSession = configBuilder(CqlSession.builder())
.addContactPoints(hosts)
.withLocalDatacenter("1")
.withKeyspace(keyspaceName)
.build()) {
createTableUSERNotificationsPreferences(cqlSession);
createTableUSERNotifications(cqlSession);
createTableVRETimeline(cqlSession);
createTableAppTimeline(cqlSession);
createTableUSERTimeline(cqlSession);
createTableHashtaggedPosts(cqlSession);
createTableHashtaggedComments(cqlSession);
createTableHashtagsCounter(cqlSession);
createTableUSERNotificationsUnread(cqlSession);
createTableUSERLikes(cqlSession);
createTableVREInvites(cqlSession);
createTableEMAILInvites(cqlSession);
createTableAttachments(cqlSession);
createTableInvites(cqlSession);
createTableLikes(cqlSession);
createTableComments(cqlSession);
createTableNotifications(cqlSession);
createTablePosts(cqlSession);
closeSession(cqlSession);
}
}
private void createTableUSERNotificationsPreferences(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("UserNotificationsPreferences")
.ifNotExists()
.withPartitionKey("userid", DataTypes.TEXT)
.withPartitionKey("type", DataTypes.TEXT)
.withColumn("preference", DataTypes.TEXT)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "USERNotificationsPreferences");
}
private void createTableUSERNotifications(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("UserNotifications")
.ifNotExists()
.withPartitionKey("userid", DataTypes.TEXT)
.withPartitionKey("timestamp", DataTypes.TIMESTAMP)
.withColumn("notid", DataTypes.UUID)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "USERNotifications");
}
private void createTableVRETimeline(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("VRETimeline")
.ifNotExists()
.withPartitionKey("vreid", DataTypes.TEXT)
.withPartitionKey("timestamp", DataTypes.TIMESTAMP)
.withColumn("postid", DataTypes.UUID)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "VRETimeline");
}
private void createTableAppTimeline(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("AppTimeline")
.ifNotExists()
.withPartitionKey("appid", DataTypes.TEXT)
.withPartitionKey("timestamp", DataTypes.TIMESTAMP)
.withColumn("postid", DataTypes.UUID)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "AppTimeline");
}
private void createTableUSERTimeline(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("UserTimeline")
.ifNotExists()
.withPartitionKey("userid", DataTypes.TEXT)
.withPartitionKey("timestamp", DataTypes.TIMESTAMP)
.withColumn("postid", DataTypes.UUID)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "USERTimeline");
}
private void createTableHashtaggedPosts(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("HashtaggedPosts")
.ifNotExists()
.withPartitionKey("hashtag", DataTypes.TEXT)
.withPartitionKey("postid", DataTypes.UUID)
.withColumn("vreid", DataTypes.TEXT)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "HashtaggedPosts");
}
private void createTableHashtaggedComments(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("HashtaggedComments")
.ifNotExists()
.withPartitionKey("hashtag", DataTypes.TEXT)
.withPartitionKey("commentid", DataTypes.UUID)
.withColumn("vreid", DataTypes.TEXT)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "HashtaggedComments");
}
private void createTableHashtagsCounter(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("HashtagsCounter")
.ifNotExists()
.withPartitionKey("vreid", DataTypes.TEXT)
.withPartitionKey("hashtag", DataTypes.TEXT)
.withColumn("count", DataTypes.BIGINT)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "HashtagsCounter");
}
private void createTableUSERNotificationsUnread(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("UserUnreadNotifications")
.ifNotExists()
.withPartitionKey("userid", DataTypes.TEXT)
.withPartitionKey("timestamp", DataTypes.TIMESTAMP)
.withColumn("notid", DataTypes.UUID)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "USERNotificationsUnread");
}
private void createTableUSERLikes(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("UserLikes")
.ifNotExists()
.withPartitionKey("userid", DataTypes.TEXT)
.withPartitionKey("likeid", DataTypes.UUID)
.withColumn("postid", DataTypes.UUID)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "USERLikes");
}
private void createTableVREInvites(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("VREInvites")
.ifNotExists()
.withPartitionKey("vreid", DataTypes.TEXT)
.withPartitionKey("inviteid", DataTypes.UUID)
.withColumn("status", DataTypes.TEXT)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "VREInvites");
}
private void createTableEMAILInvites(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("EmailInvites")
.ifNotExists()
.withPartitionKey("email", DataTypes.TEXT)
.withPartitionKey("vreid", DataTypes.TEXT)
.withColumn("inviteid", DataTypes.UUID)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "EMAILInvites");
}
private void createTableAttachments(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("Attachments")
.ifNotExists()
.withPartitionKey("attachid", DataTypes.UUID)
.withColumn("postid", DataTypes.UUID)
.withColumn("uri", DataTypes.TEXT)
.withColumn("name", DataTypes.TEXT)
.withColumn("description", DataTypes.TEXT)
.withColumn("urithumbnail", DataTypes.TEXT)
.withColumn("mimetype", DataTypes.TEXT)
.withCompactStorage()
.build());
cqlSession.execute(SchemaBuilder.createIndex("post_attach")
.ifNotExists()
.onTable("Attachments")
.andColumn("postid")
.build());
_log.info("+ Table '{}' has been created (if needed).", "Attachments");
}
private void createTableInvites(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("Invites")
.ifNotExists()
.withPartitionKey("inviteid", DataTypes.UUID)
.withColumn("senderuserid", DataTypes.TEXT)
.withColumn("vreid", DataTypes.TEXT)
.withColumn("email", DataTypes.TEXT)
.withColumn("controlcode", DataTypes.TEXT)
.withColumn("status", DataTypes.TEXT)
.withColumn("timestamp", DataTypes.TIMESTAMP)
.withColumn("senderfullname", DataTypes.TEXT)
.withCompactStorage()
.build());
cqlSession.execute(SchemaBuilder.createIndex("sender")
.ifNotExists()
.onTable("Invites")
.andColumn("senderuserid")
.build());
_log.info("+ Table '{}' has been created (if needed).", "Invites");
}
private void createTableLikes(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("Likes")
.ifNotExists()
.withPartitionKey("likeid", DataTypes.UUID)
.withColumn("userid", DataTypes.TEXT)
.withColumn("fullname", DataTypes.TEXT)
.withColumn("thumbnailurl", DataTypes.TEXT)
.withColumn("postid", DataTypes.UUID)
.withColumn("timestamp", DataTypes.TIMESTAMP)
.withCompactStorage()
.build());
cqlSession.execute(SchemaBuilder.createIndex("post_likes")
.ifNotExists()
.onTable("Likes")
.andColumn("postid")
.build());
_log.info("+ Table '{}' has been created (if needed).", "Likes");
}
private void createTableComments(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("Comments")
.ifNotExists()
.withPartitionKey("commentid", DataTypes.UUID)
.withColumn("userid", DataTypes.TEXT)
.withColumn("fullname", DataTypes.TEXT)
.withColumn("thumbnailurl", DataTypes.TEXT)
.withColumn("comment", DataTypes.TEXT)
.withColumn("postid", DataTypes.UUID)
.withColumn("timestamp", DataTypes.TIMESTAMP)
.withColumn("isedit", DataTypes.BOOLEAN)
.withColumn("lastedittime", DataTypes.TIMESTAMP)
.withCompactStorage()
.build());
cqlSession.execute(SchemaBuilder.createIndex("post_comments")
.ifNotExists()
.onTable("Comments")
.andColumn("postid")
.build());
_log.info("+ Table '{}' has been created (if needed).", "Comments");
}
private void createTableNotifications(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("Notifications")
.ifNotExists()
.withPartitionKey("notid", DataTypes.UUID)
.withColumn("type", DataTypes.TEXT)
.withColumn("userid", DataTypes.TEXT)
.withColumn("subjectid", DataTypes.TEXT)
.withColumn("timestamp", DataTypes.TIMESTAMP)
.withColumn("description", DataTypes.TEXT)
.withColumn("uri", DataTypes.TEXT)
.withColumn("senderid", DataTypes.TEXT)
.withColumn("senderfullname", DataTypes.TEXT)
.withColumn("senderthumbnailurl", DataTypes.TEXT)
.withColumn("isread", DataTypes.BOOLEAN)
.withCompactStorage()
.build());
cqlSession.execute(SchemaBuilder.createIndex("not_type")
.ifNotExists()
.onTable("Notifications")
.andColumn("type")
.build());
_log.info("+ Table '{}' has been created (if needed).", "Notifications");
}
private void createTablePosts(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("Posts")
.ifNotExists()
.withPartitionKey("postid", DataTypes.UUID)
.withColumn("linkhost", DataTypes.TEXT)
.withColumn("description", DataTypes.TEXT)
.withColumn("email", DataTypes.TEXT)
.withColumn("likesno", DataTypes.BIGINT)
.withColumn("thumbnailurl", DataTypes.TEXT)
.withColumn("linkdescription", DataTypes.TEXT)
.withColumn("timestamp", DataTypes.TIMESTAMP)
.withColumn("uri", DataTypes.TEXT)
.withColumn("isapplicationpost", DataTypes.BOOLEAN)
.withColumn("entityid", DataTypes.TEXT)
.withColumn("privacy", DataTypes.TEXT)
.withColumn("type", DataTypes.TEXT)
.withColumn("urithumbnail", DataTypes.TEXT)
.withColumn("vreid", DataTypes.TEXT)
.withColumn("multifileupload", DataTypes.BOOLEAN)
.withColumn("fullname", DataTypes.TEXT)
.withColumn("commentsno", DataTypes.BIGINT)
.withColumn("linktitle", DataTypes.TEXT)
.withCompactStorage()
.build());
cqlSession.execute(SchemaBuilder.createIndex("posts_privacy")
.ifNotExists()
.onTable("Posts")
.andColumn("privacy")
.build());
_log.info("+ Table '{}' has been created (if needed).", "Posts");
}
}

RunningCluster.java

@@ -7,9 +7,14 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.Serializable;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.metadata.Metadata;
import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata;
import org.gcube.common.portal.GCubePortalConstants;
import org.gcube.common.portal.PortalContext;
import org.gcube.common.resources.gcore.ServiceEndpoint;
@@ -22,8 +27,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Massimiliano Assante ISTI-CNR
* @author Ahmed Salah Tawfik Ibrahim ISTI-CNR
*
* @version 0.1 Dec 2012
* @version 2.0.0 October 2023
*
*/
@SuppressWarnings("serial")
@@ -38,7 +44,7 @@ public class RunningCluster implements Serializable {
*/
private static final String HOST_PROPERTY = "host";
private static final String HOST_PORT_PROPERTY = "port";
private static final String CLUSTER_NAME_PROPERTY = "cluster";
private static final String DATACENTER_NAME_PROPERTY = "datacenter";
private static final String KEY_SPACE_NAME_PROPERTY = "keyspace";
/**
* other constants
@@ -56,11 +62,11 @@ public class RunningCluster implements Serializable {
/**
* Cluster Name
*/
private String clusterName;
private String datacenterName;
/**
* Keyspace Name
*/
private String keyspaceName;
private String keyspaceName; //to be modified
/**
* @param infrastructureName could be null
@@ -76,8 +82,8 @@ public class RunningCluster implements Serializable {
* private constructor
*/
private RunningCluster(String infrastructureName) {
try {
List<ServiceEndpoint> resources = getConfigurationFromIS(infrastructureName);
//Query the IS (for the future)
/*List<ServiceEndpoint> resources = getConfigurationFromIS(infrastructureName);
if (resources.size() > 1) {
_log.error("Too many Runtime Resource having name " + RUNTIME_RESOURCE_NAME +" in this scope ");
throw new TooManyRunningClustersException("There exist more than 1 Runtime Resource in this scope having name "
@@ -97,8 +103,13 @@
}
} catch (Exception e) {
e.printStackTrace();
}
}*/
host = "10.1.28.55:9042, 10.1.30.142:9042, 10.1.28.100:9042";
datacenterName = "1";
keyspaceName = "dev_mig_new_schema_test";
}
/**
*
* @return the
@@ -125,13 +136,14 @@
}
private String readInfrastructureName() {
Properties props = new Properties();
try {
StringBuilder sb = new StringBuilder(getCatalinaHome());
sb.append(File.separator)
.append(PortalContext.CONFIGURATION_FOLDER)
.append(File.separator)
.append(PortalContext.INFRA_PROPERTY_FILENAME);
.append(PortalContext.CONFIGURATION_FOLDER)
.append(File.separator)
.append(PortalContext.INFRA_PROPERTY_FILENAME);
String propertyfile = sb.toString();
File propsFile = new File(propertyfile);
FileInputStream fis = new FileInputStream(propsFile);
@@ -153,7 +165,7 @@
try {
props.load(CassandraClusterConnection.class.getResourceAsStream(DEFAULT_CONFIGURATION));
host = props.getProperty(HOST_PROPERTY) + ":" + props.getProperty(HOST_PORT_PROPERTY);
clusterName = props.getProperty(CLUSTER_NAME_PROPERTY);
datacenterName = props.getProperty(DATACENTER_NAME_PROPERTY);
keyspaceName = props.getProperty(KEY_SPACE_NAME_PROPERTY);
} catch (IOException e) {
e.printStackTrace();
@@ -169,25 +181,19 @@
this.host = host;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getKeyspaceName() {
return keyspaceName;
}
public void setKeyspaceName(String keyspaceName) {
this.keyspaceName = keyspaceName;
}
@Override
public String toString() {
return "RunningCluster [host=" + host + ", clusterName=" + clusterName
return "RunningCluster [host=" + host + ", datacenterName=" + datacenterName
+ ", keyspaceName=" + keyspaceName + "]";
}
/**
@@ -197,4 +203,21 @@ public class RunningCluster implements Serializable {
private static String getCatalinaHome() {
return (System.getenv("CATALINA_HOME").endsWith("/") ? System.getenv("CATALINA_HOME") : System.getenv("CATALINA_HOME")+"/");
}
public void setDatacenterName(String datacenterName){
this.datacenterName = datacenterName;
}
public String getDatacenterName() {
return datacenterName;
}
public List<InetSocketAddress> getHosts() {
List<InetSocketAddress> hosts = new ArrayList<>();
String [] ips = host.split(", ");
for (String ip: ips){
String[] ip_port = ip.split(":");
hosts.add(new InetSocketAddress(ip_port[0], Integer.parseInt(ip_port[1])));
}
return hosts;
}
}

Schema.java

@@ -0,0 +1,68 @@
package org.gcube.portal.databook.server;
public class Schema {
//Tables
public static final String NOTIFICATIONS = "Notifications";
public static final String POSTS = "Posts";
public static final String COMMENTS = "Comments";
public static final String LIKES = "Likes";
public static final String INVITES = "Invites";
public static final String VRE_TIMELINE_POSTS = "VRETimeline";
public static final String USER_TIMELINE_POSTS = "UserTimeline";
public static final String APP_TIMELINE_POSTS = "AppTimeline";
public static final String USER_LIKED_POSTS = "UserLikes";
public static final String USER_NOTIFICATIONS = "UserNotifications"; // regular user notifications timeline (both read and unread, messages are included)
public static final String USER_NOTIFICATIONS_UNREAD = "UserUnreadNotifications"; // only unread user notifications/ notifications messages
public static final String USER_NOTIFICATIONS_PREFERENCES = "UserNotificationsPreferences"; // preferences for notifications
public static final String HASHTAGS_COUNTER = "HashtagsCounter"; // count the hashtags per group and type
public static final String HASHTAGGED_POSTS = "HashtaggedPosts"; // contains hashtags per type associated with vre and POST
public static final String HASHTAGGED_COMMENTS = "HashtaggedComments"; // contains hashtags per type associated with vre and comment
public static final String VRE_INVITES = "VREInvites"; //contains the emails that were invited per VRE
public static final String EMAIL_INVITES = "EmailInvites"; //contains the list of invitation per email
public static final String ATTACHMENTS = "Attachments"; //contains the list of all the attachments in a POST
//columns
public static final String USER_ID = "userid"; //text
public static final String TYPE = "type"; //text
public static final String PREFERENCE = "preference"; //text
public static final String TIMESTAMP = "timestamp"; //timestamp
public static final String NOT_ID = "notid"; //UUID
public static final String VRE_ID = "vreid"; //text
public static final String POST_ID = "postid"; //UUID
public static final String APP_ID = "appid"; //text
public static final String HASHTAG = "hashtag"; //text
public static final String COMMENT_ID = "commentid"; //UUID
public static final String COUNT = "count"; //big int
public static final String LIKE_ID = "likeid"; //UUID
public static final String INVITE_ID = "inviteid"; //UUID
public static final String STATUS = "status"; //text
public static final String EMAIL = "email"; //text
public static final String ATTACH_ID = "attachid"; //UUID
public static final String URI = "uri"; //text
public static final String NAME = "name"; //text
public static final String DESCRIPTION = "description"; //text
public static final String URI_THUMBNAIL = "urithumbnail"; //text
public static final String MIME_TYPE = "mimetype"; //text
public static final String SENDER_USER_ID = "senderuserid"; //text
public static final String CONTROL_CODE = "controlcode"; //text
public static final String SENDER_FULL_NAME = "senderfullname"; //text
public static final String FULL_NAME = "fullname"; //text
public static final String THUMBNAIL_URL = "thumbnailurl"; //text
public static final String COMMENT = "comment"; //text
public static final String IS_EDIT = "isedit"; //bool
public static final String LAST_EDIT_TIME = "lastedittime"; //timestamp
public static final String SUBJECT_ID = "subjectid"; //text
public static final String SENDER_ID = "senderid"; //text
public static final String SENDER_THUMBNAIL_URL = "senderthumbnailurl"; //text
public static final String IS_READ = "isread"; //bool
public static final String LINK_HOST = "linkhost"; //text
public static final String LIKES_NO = "likesno"; //big int
public static final String LINK_DESCRIPTION = "linkdescription"; //text
public static final String IS_APPLICATION_POST = "isapplicationpost"; //bool -->
public static final String ENTITY_ID = "entityid"; //text
public static final String PRIVACY = "privacy"; //text
public static final String MULTI_FILE_UPLOAD = "multifileupload"; //bool
public static final String COMMENTS_NO = "commentsno"; //big int
public static final String LINK_TITLE = "linktitle"; //text
}
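
These constants are intended to be used when building CQL statements against the tables created above. A minimal usage sketch with the DataStax query builder, assuming an already open CqlSession and the hypothetical helper readPostDescription, might look like this:

```java
import java.util.UUID;

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.Row;
import com.datastax.oss.driver.api.core.cql.SimpleStatement;
import com.datastax.oss.driver.api.querybuilder.QueryBuilder;

public class SchemaUsageSketch {

    /** Reads the description of a single post; a sketch, not part of this commit. */
    public static String readPostDescription(CqlSession session, UUID postId) {
        // SELECT description FROM Posts WHERE postid = <postId>
        SimpleStatement stmt = QueryBuilder.selectFrom(Schema.POSTS)
                .column(Schema.DESCRIPTION)
                .whereColumn(Schema.POST_ID)
                .isEqualTo(QueryBuilder.literal(postId))
                .build();
        Row row = session.execute(stmt).one();
        return row == null ? null : row.getString(Schema.DESCRIPTION);
    }
}
```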