Compare commits

...

48 Commits

Author SHA1 Message Date
Ahmed Salah Tawfik Ibrahim 392ab46bc9 2.0.1 2024-05-17 11:34:00 +02:00
Ahmed Salah Tawfik Ibrahim 400cff1076 2.0.1-SNAPSHOT 2024-05-17 10:21:44 +02:00
Ahmed Salah Tawfik Ibrahim 572c7b8199 removed noisy logs 2024-04-22 10:49:02 +02:00
Ahmed Salah Tawfik Ibrahim 8509015a18 2.0.1 2024-04-16 15:19:38 +02:00
Ahmed Salah Tawfik Ibrahim 9fbec3653d 2.0.1-SNAPSHOT 2024-04-16 15:19:04 +02:00
Ahmed Salah Tawfik Ibrahim 85ab33184d cleaning logs 2024-04-16 15:04:52 +02:00
Ahmed Salah Tawfik Ibrahim 01821f5da3 cleaning logs 2024-04-16 15:03:57 +02:00
Ahmed Salah Tawfik Ibrahim bcbec61646 bug fix 2024-04-12 17:25:49 +02:00
Ahmed Salah Tawfik Ibrahim 93e501d65f bug fix 2024-04-12 17:25:22 +02:00
Ahmed Salah Tawfik Ibrahim 9a64a684d1 bug fix 2024-04-12 17:17:48 +02:00
Ahmed Salah Tawfik Ibrahim 5b5dfb56c4 bug fix 2024-04-12 17:17:23 +02:00
Ahmed Salah Tawfik Ibrahim 1562e6a441 bug fix 2024-04-12 17:02:13 +02:00
Ahmed Salah Tawfik Ibrahim 29471627da bug fix 2024-04-12 17:01:16 +02:00
Ahmed Salah Tawfik Ibrahim 8b6f4ccf2f bug fix 2024-04-12 16:32:02 +02:00
Ahmed Salah Tawfik Ibrahim 82d13b556b snapshot bug fix 2024-04-12 16:31:23 +02:00
Ahmed Salah Tawfik Ibrahim 899640141c possible fix 2024-04-11 18:30:21 +02:00
Ahmed Salah Tawfik Ibrahim 368d420799 possible fix 2024-04-11 18:19:55 +02:00
Ahmed Salah Tawfik Ibrahim 18cb5b7312 bug fixed 2024-04-11 14:44:58 +02:00
Ahmed Salah Tawfik Ibrahim b8f9b0a90a debug_error 2024-04-11 12:39:35 +02:00
Ahmed Salah Tawfik Ibrahim bcd8e29a8c Merge remote-tracking branch 'origin/master' 2023-12-13 13:47:29 +01:00
Ahmed Salah Tawfik Ibrahim 35c8cfdc5a Debug notification 2023-12-13 13:47:19 +01:00
Massimiliano Assante c06e47ea68 ready to release 2023-12-11 12:58:57 +01:00
Massimiliano Assante 5c2e5d5874 SNAPSHOT version 2023-12-11 12:58:08 +01:00
Ahmed Salah Tawfik Ibrahim 2a9bdc6c4a Ready for Release 2023-12-07 14:24:14 +01:00
Ahmed Salah Tawfik Ibrahim 083ca91e45 SNAPSHOT Version Ready 2023-12-07 14:23:28 +01:00
Ahmed Salah Tawfik Ibrahim 7aa3a1244e Ready for release 2023-12-07 14:21:20 +01:00
Ahmed Salah Tawfik Ibrahim 70d7e31540 Test new datacenter name 2023-12-07 14:14:23 +01:00
Ahmed Salah Tawfik Ibrahim 02bc40b6e8 Added Service Discovery 2023-12-06 16:40:46 +01:00
Ahmed Salah Tawfik Ibrahim 68a7532379 Added Service Discovery 2023-12-06 15:53:23 +01:00
Ahmed Salah Tawfik Ibrahim 79f326d906 Ready to release 2023-12-05 16:39:16 +01:00
Ahmed Salah Tawfik Ibrahim 0a9e97f680 Ready to release 2023-12-05 16:38:31 +01:00
Ahmed Salah Tawfik Ibrahim 099432f9a1 Ready to release 2023-12-05 16:31:08 +01:00
Ahmed Salah Tawfik Ibrahim 81542d5c0a Merge remote-tracking branch 'origin/master' 2023-12-05 16:30:09 +01:00
Ahmed Salah Tawfik Ibrahim 0d435d2398 Ready to release 2023-12-05 16:30:00 +01:00
Massimiliano Assante 38fdd5b762 fixed pom 2023-12-05 15:47:12 +01:00
Massimiliano Assante 9d04f4af59 fixed pom 2023-12-05 15:46:36 +01:00
Ahmed Salah Tawfik Ibrahim e8ca4a46a7 Ready to release 2023-12-04 18:16:12 +01:00
Ahmed Salah Tawfik Ibrahim 8a27841094 Snapshot version 2023-12-04 18:15:03 +01:00
Ahmed Salah Tawfik Ibrahim 785511f6a3 Bugs fixed 2023-12-04 17:38:33 +01:00
Ahmed Salah Tawfik Ibrahim b968a6f7dd Attachments bug fixed 2023-11-28 19:40:20 +01:00
Ahmed Salah Tawfik Ibrahim 61723dcc11 DB Write bug fixed 2023-11-27 15:45:26 +01:00
Ahmed Salah Tawfik Ibrahim 36eefcba26 Added author. 2023-11-24 11:49:40 +01:00
Ahmed Salah Tawfik Ibrahim 9ea2895833 Bug fixed - nullpointer exception result.all(), result.one() 2023-11-22 19:40:29 +01:00
Ahmed Salah Tawfik Ibrahim 6cf162bb91 Fixed performance bug 2023-11-15 18:14:57 +01:00
Ahmed Salah Tawfik Ibrahim 03d2b0cdf9 Remove dependencies from jar. 2023-11-14 12:28:57 +01:00
Ahmed Salah Tawfik Ibrahim 204462fcd6 Remove astyanx dependencies and imports. 2023-10-30 19:04:54 +01:00
Ahmed Salah Tawfik Ibrahim 3bd81a415f Datastax Driver Fully Implemented - Partially Tested - return of deprecated methods 2023-10-30 18:52:02 +01:00
Massimiliano Assante 6465e88378 removed obsolete maven plugin for javadoc 2022-12-20 10:34:49 +01:00
13 changed files with 2771 additions and 1754 deletions

View File

@@ -1,10 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
<wb-module deploy-name="social-library">
<wb-resource deploy-path="/" source-path="/src/main/java"/>
<wb-resource deploy-path="/" source-path="/src/test/java"/>
</wb-module>
</project-modules>

View File

@@ -4,9 +4,14 @@
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v1.18.0] - 2022-09-20
## [v2.0.1] - 2024-04-22
- Feature #23891, The (wrongly named) Feed class has been changed to Post, all the methods have been changed accordingly and the old one set as deprecated
- Bug 27218 - Null pointer exception getting notifications preferences fixed
- Feature 27286 - Removed noisy logs
## [v2.0.0] - 2023-12-04
- Support for Cassandra 4.1.3 using DataStax java driver
## [v1.17.0] - 2022-05-13

View File

@@ -1,3 +1,3 @@
Manifest-Version: 1.0
Class-Path:
Main-Class: org.gcube.portal.databook.server.Tester

View File

@@ -24,10 +24,14 @@ See [Releases](https://code-repo.d4science.org/gCubeSystem/social-util-library/r
* **Massimiliano Assante** ([ORCID](https://orcid.org/0000-0002-3761-1492)) - [ISTI-CNR Infrascience Group](https://www.isti.cnr.it/People/M.Assante)
* **Ahmed Ibrahim** ([ORCID](https://orcid.org/0009-0001-3009-5755)) - [ISTI-CNR Infrascience Group](https://www.isti.cnr.it/en/about/people-detail/976/Ahmed_Salah_Tawfik_Ibrahim)
## Maintainers
* **Massimiliano Assante** ([ORCID](https://orcid.org/0000-0002-3761-1492)) - [ISTI-CNR Infrascience Group](https://www.isti.cnr.it/People/M.Assante)
* **Ahmed Ibrahim** ([ORCID](https://orcid.org/0009-0001-3009-5755)) - [ISTI-CNR Infrascience Group](https://www.isti.cnr.it/en/about/people-detail/976/Ahmed_Salah_Tawfik_Ibrahim)
## License
This project is licensed under the EUPL V.1.1 License - see the [LICENSE.md](LICENSE.md) file for details.

73
pom.xml
View File

@@ -1,17 +1,17 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.1.0</version>
<version>1.2.0</version>
<relativePath />
</parent>
<groupId>org.gcube.portal</groupId>
<artifactId>social-networking-library</artifactId>
<version>1.18.0</version>
<version>2.0.1</version>
<name>gCube Social Networking Library</name>
<description>
The gCube Social Networking Library is the 'bridge' between your gCube Applications and the social networking facilities.
@@ -28,47 +28,44 @@
<gwtVersion>2.8.1</gwtVersion>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<cassandra.driver.oss.version>4.13.0</cassandra.driver.oss.version>
<logback.version>1.2.3</logback.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>maven-portal-bom</artifactId>
<version>3.6.4</version>
<version>3.7.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-query-builder</artifactId>
<version>${cassandra.driver.oss.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-mapper-runtime</artifactId>
<version>${cassandra.driver.oss.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.google</groupId>
<artifactId>gwt-jsonmaker</artifactId>
</dependency>
<dependency>
<groupId>com.netflix.astyanax</groupId>
<artifactId>astyanax-core</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.netflix.astyanax</groupId>
<artifactId>astyanax-thrift</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.netflix.astyanax</groupId>
<artifactId>astyanax-cassandra</artifactId>
<version>1.2.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.resources.discovery</groupId>
<artifactId>ic-client</artifactId>
<version>1.0.4</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.common.portal</groupId>
@@ -94,10 +91,18 @@
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
@@ -135,24 +140,6 @@
<skipTests>true</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<configuration>
<additionalparam>-Xdoclint:none</additionalparam>
<additionalJOption>-Xdoclint:none</additionalJOption>
</configuration>
<version>3.1.0</version>
<executions>
<execution>
<id>generate-doc</id>
<phase>install</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>

View File

@@ -1,33 +1,25 @@
package org.gcube.portal.databook.server;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.net.InetSocketAddress;
import java.time.Duration;
import java.util.List;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.CqlSessionBuilder;
import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
import com.datastax.oss.driver.api.core.config.DriverConfigLoader;
import com.datastax.oss.driver.api.core.cql.ResultSet;
import com.datastax.oss.driver.api.core.metadata.Metadata;
import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.querybuilder.SchemaBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.ddl.ColumnDefinition;
import com.netflix.astyanax.ddl.ColumnFamilyDefinition;
import com.netflix.astyanax.ddl.KeyspaceDefinition;
import com.netflix.astyanax.ddl.SchemaChangeResult;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.thrift.ddl.ThriftColumnDefinitionImpl;
import com.netflix.astyanax.thrift.ddl.ThriftColumnFamilyDefinitionImpl;
/**
* @author Massimiliano Assante ISTI-CNR
* @author Costantino Perciante ISTI-CNR
* @author Ahmed Ibrahim ISTI-CNR
*
*/
public class CassandraClusterConnection {
@@ -39,304 +31,473 @@ public class CassandraClusterConnection {
/**
* keyspace location
*/
private static String clusterName;
private static String host;
private static List<InetSocketAddress> hosts;
private static String datacenterName;
private static String keyspaceName;
private CqlSession myKeyspaceSession;
private Keyspace myKeyspace;
/**
*
* @param dropSchema set true if you want to drop the current and set up new one
* the connection to cassandra cluster
*/
protected CassandraClusterConnection(boolean dropSchema) {
if (clusterName == null || host == null || keyspaceName == null) {
protected CassandraClusterConnection(boolean dropSchema) throws Exception {
if (hosts == null || datacenterName == null || keyspaceName == null) {
RunningCluster cluster = RunningCluster.getInstance(null);
clusterName = cluster.getClusterName();
host = cluster.getHost();
//host = cluster.getHost();
hosts = cluster.getHosts();
datacenterName = cluster.getDatacenterName();
keyspaceName = cluster.getKeyspaceName();
}
AstyanaxContext<Cluster> clusterContext = new AstyanaxContext.Builder()
.forCluster(clusterName)
.withAstyanaxConfiguration(new AstyanaxConfigurationImpl())
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl(
clusterName).setMaxConnsPerHost(100)
.setSeeds(host))
.withConnectionPoolMonitor(
new CountingConnectionPoolMonitor())
.buildCluster(ThriftFamilyFactory.getInstance());
_log.info(keyspaceName + " KeySpace SetUp ...");
SetUpKeySpaces(clusterContext, dropSchema);
SetUpKeySpaces(dropSchema);
myKeyspaceSession = connect(keyspaceName);
_log.info("CONNECTED! using KeySpace: " + keyspaceName);
// then close connection pool for cluster
_log.info("Closing cluster connection pool no longer needed (keyspace one will be used)");
clusterContext.shutdown();
_log.info("Closed cluster connection pool no longer needed (keyspace one will be used)");
}
/**
* Close the connection pool
*/
public void closeConnection(){
if(myKeyspace != null){
try{
_log.info("Closing pool connection");
myKeyspace.getConnectionPool().shutdown();
_log.info("Pool closed!");
}catch(Exception e){
_log.error("Unable to close connection pool", e);
}
}
}
/**
*
* @param dropSchema set true if you want to drop the current and set up new one
* @param dropSchema set true if you want to drop the current and set up new one
* the connection to cassandra cluster
*/
protected CassandraClusterConnection(boolean dropSchema, String infrastructureName) {
if (clusterName == null || host == null || keyspaceName == null) {
protected CassandraClusterConnection(boolean dropSchema, String infrastructureName) throws Exception {
if (hosts == null || datacenterName == null || keyspaceName == null) {
RunningCluster cluster = RunningCluster.getInstance(infrastructureName);
clusterName = cluster.getClusterName();
host = cluster.getHost();
//host = cluster.getHost();
hosts = cluster.getHosts();
datacenterName = cluster.getDatacenterName();
keyspaceName = cluster.getKeyspaceName();
}
AstyanaxContext<Cluster> clusterContext = new AstyanaxContext.Builder()
.forCluster(clusterName)
.withAstyanaxConfiguration(new AstyanaxConfigurationImpl())
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl(
clusterName).setMaxConnsPerHost(100)
.setSeeds(host))
.withConnectionPoolMonitor(
new CountingConnectionPoolMonitor())
.buildCluster(ThriftFamilyFactory.getInstance());
_log.info(keyspaceName + " KeySpace SetUp ...");
SetUpKeySpaces(clusterContext, dropSchema);
SetUpKeySpaces(dropSchema);
myKeyspaceSession = connect(keyspaceName);
_log.info("CONNECTED! using KeySpace: " + keyspaceName);
}
// then close connection pool for cluster
_log.info("Closing cluster connection pool no longer needed (keyspace one will be used)");
clusterContext.shutdown();
_log.info("Closed cluster connection pool no longer needed (keyspace one will be used)");
/**
 * Returns the session bound to the configured keyspace, lazily
 * reconnecting when the cached session has been closed.
 *
 * @return an open {@link CqlSession} for {@code keyspaceName}
 */
public CqlSession getKeyspaceSession() {
    CqlSession session = myKeyspaceSession;
    if (session.isClosed()) {
        // Rebuild and re-cache a fresh keyspace-bound session.
        session = connect(keyspaceName);
        myKeyspaceSession = session;
    }
    return session;
}
/**
* Get the reference to the current keyspace
* @return keyspace reference
* @param dropSchema set true if you want to drop the current and set up new one
* the connection to cassandra cluster
*/
public Keyspace getKeyspace() {
// The Keyspace instance can be shared among different requests
if(myKeyspace == null){
synchronized(this){
if(myKeyspace == null){ // double checked lock
AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder()
.forCluster(clusterName)
.forKeyspace(keyspaceName)
.withAstyanaxConfiguration(
new AstyanaxConfigurationImpl()
.setDiscoveryType(NodeDiscoveryType.NONE) // use only the host given as seeds (do not discover)
.setConnectionPoolType(ConnectionPoolType.ROUND_ROBIN) // how to handle connections of the the connection pool
)
.withConnectionPoolConfiguration(
new ConnectionPoolConfigurationImpl("MyConnectionPool")
.setMaxConnsPerHost(3) // for each seed(host)
.setSocketTimeout(2000) //-> default: 11 seconds
//.setConnectTimeout(1000) -> default: 2 seconds
.setSeeds(host)
)
.withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
.buildKeyspace(ThriftFamilyFactory.getInstance());
context.start();
// save keyspace reference
myKeyspace = context.getEntity();
}
public void SetUpKeySpaces(boolean dropSchema) {
boolean createNew = false;
boolean found = false;
CqlSession session = connect();
Metadata metaData = session.getMetadata();
for (KeyspaceMetadata meta : metaData.getKeyspaces().values()) {
if (meta.getName().toString().equals(keyspaceName)){
found = true;
break;
}
}
return myKeyspace;
}
/**
*
* @param clusterContext
* @param dropSchema
* @throws ConnectionException
*/
public void SetUpKeySpaces(AstyanaxContext<Cluster> clusterContext, boolean dropSchema) {
boolean createNew = false;
clusterContext.start();
try {
Cluster cluster = clusterContext.getEntity();
KeyspaceDefinition keyspaceDef = cluster.describeKeyspace(keyspaceName);
if (dropSchema && keyspaceDef != null) {
if (dropSchema && found) {
_log.info("Dropping Keyspace: " + keyspaceName + " ...");
try {
OperationResult<SchemaChangeResult> returned = cluster.dropKeyspace(keyspaceName);
ResultSet returned = dropKeyspace();
Thread.sleep(2000);
_log.info("Dropped " + returned.getResult().toString());
} catch (ConnectionException e) {
if (returned.wasApplied())
_log.info("Dropped " + keyspaceName);
else
_log.info("Couldn't drop " + keyspaceName);
} catch (Exception e) {
_log.error("Dropping Keyspace operation Failed ... " + keyspaceName + " does NOT exists");
return;
} catch (InterruptedException e) {
e.printStackTrace();
}
createNew = true;
}
keyspaceDef = cluster.makeKeyspaceDefinition();
keyspaceDef = cluster.describeKeyspace(keyspaceName);
if (keyspaceDef == null || keyspaceDef.getName() == null || createNew) {
if (!found || createNew) {
_log.info("Keyspace does not exist, triggering schema creation ... ");
createSchema(cluster);
_log.info("Cluster " + clusterName + " on " + host + " Initialized OK!");
int replicationFactor = 2;
createKeyspace(keyspaceName, replicationFactor);
closeSession(session);
createTables();
_log.info("Using Keyspace " + keyspaceName);
}
} catch (ConnectionException e) {
} catch (Exception e) {
e.printStackTrace();
}
}
/*
*
********************** CASSANDRA KEYSPACE CREATION ***********************
*
*/
/**
* create the databook schema
* @return
* @throws ConnectionException
*/
private void createSchema(Cluster cluster) throws ConnectionException {
Map<String, String> stratOptions = new HashMap<String, String>();
stratOptions.put("replication_factor", "1");
KeyspaceDefinition ksDef = cluster.makeKeyspaceDefinition();
//get static column families with secondary indexes
/**
* define Notifications CF with Type as secondary index
*/
ColumnFamilyDefinition cfDefNotifications = getStaticCFDef(DBCassandraAstyanaxImpl.NOTIFICATIONS, "Type");
/**
* define Feeds CF with Privacy as secondary index
*/
ColumnFamilyDefinition cfDefFeeds = getStaticCFDef(DBCassandraAstyanaxImpl.FEEDS, "Privacy");
/**
* define Comments CF with FeedId as secondary index
*/
ColumnFamilyDefinition cfDefComments = getStaticCFDef(DBCassandraAstyanaxImpl.COMMENTS, "Feedid");
/**
* define Likes CF with FeedId as secondary index
*/
ColumnFamilyDefinition cfDefLikes = getStaticCFDef(DBCassandraAstyanaxImpl.LIKES, "Feedid");
/**
* define Invites CF with SenderUserId as secondary index
*/
ColumnFamilyDefinition cfDefInvites = getStaticCFDef(DBCassandraAstyanaxImpl.INVITES, "SenderUserId");
/**
* define Attachments CF with FeedId as secondary index
*/
ColumnFamilyDefinition cfDefAttachments = getStaticCFDef(DBCassandraAstyanaxImpl.ATTACHMENTS, "feedId");
//get dynamic column families, act as auxiliary indexes
ColumnFamilyDefinition cfDefConn = getDynamicCFDef(DBCassandraAstyanaxImpl.CONNECTIONS);
ColumnFamilyDefinition cfDefPendingConn = getDynamicCFDef(DBCassandraAstyanaxImpl.PENDING_CONNECTIONS_CF_NAME);
ColumnFamilyDefinition cfDefVRETimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.VRE_TIMELINE_FEEDS);
ColumnFamilyDefinition cfDefAPPTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.APP_TIMELINE_FEEDS);
ColumnFamilyDefinition cfDefUserTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_TIMELINE_FEEDS);
ColumnFamilyDefinition cfDefUserLikedFeeds = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_LIKED_FEEDS);
ColumnFamilyDefinition cfDefUserNotifications = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_NOTIFICATIONS);
ColumnFamilyDefinition cfDefUserNotificationsUnread = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_NOTIFICATIONS_UNREAD);
ColumnFamilyDefinition cfDefUserNotificationsPreferences = getDynamicCFDef(DBCassandraAstyanaxImpl.USER_NOTIFICATIONS_PREFERENCES);
ColumnFamilyDefinition cfDefEmailInvitesTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.EMAIL_INVITES);
ColumnFamilyDefinition cfDefVREInvitesTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.VRE_INVITES);
ColumnFamilyDefinition cfDefHashtagsCounter = getDynamicCFDef(DBCassandraAstyanaxImpl.HASHTAGS_COUNTER);
ColumnFamilyDefinition cfDefHashtagTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.HASHTAGGED_FEEDS);
ColumnFamilyDefinition cfDefHashtagCommentsTimeline = getDynamicCFDef(DBCassandraAstyanaxImpl.HASHTAGGED_COMMENTS);
ksDef.setName(keyspaceName)
.setStrategyOptions(stratOptions)
.setStrategyClass("SimpleStrategy")
.addColumnFamily(cfDefNotifications)
.addColumnFamily(cfDefFeeds)
.addColumnFamily(cfDefComments)
.addColumnFamily(cfDefLikes)
.addColumnFamily(cfDefInvites)
.addColumnFamily(cfDefAttachments)
.addColumnFamily(cfDefConn)
.addColumnFamily(cfDefPendingConn)
.addColumnFamily(cfDefVRETimeline)
.addColumnFamily(cfDefAPPTimeline)
.addColumnFamily(cfDefUserTimeline)
.addColumnFamily(cfDefUserNotifications)
.addColumnFamily(cfDefUserNotificationsUnread)
.addColumnFamily(cfDefUserNotificationsPreferences)
.addColumnFamily(cfDefUserLikedFeeds)
.addColumnFamily(cfDefEmailInvitesTimeline)
.addColumnFamily(cfDefVREInvitesTimeline)
.addColumnFamily(cfDefHashtagsCounter)
.addColumnFamily(cfDefHashtagTimeline)
.addColumnFamily(cfDefHashtagCommentsTimeline);
cluster.addKeyspace(ksDef);
/**
 * Opens a cluster-level session (no keyspace bound) against the statically
 * configured contact points and local datacenter.
 *
 * @return a freshly built {@link CqlSession}
 */
private static CqlSession connect() {
    CqlSessionBuilder builder = configBuilder(CqlSession.builder());
    builder.addContactPoints(hosts);
    builder.withLocalDatacenter(datacenterName);
    CqlSession cqlSession = builder.build();
    _log.info("[OK] Connected to Cassandra Cluster");
    return cqlSession;
}
/**
 * Opens a session bound to the given keyspace, using the statically
 * configured contact points and local datacenter.
 *
 * @param KEYSPACE_NAME keyspace to bind the new session to
 * @return a freshly built {@link CqlSession} scoped to the keyspace
 */
private static CqlSession connect(String KEYSPACE_NAME) {
    CqlSessionBuilder builder = configBuilder(CqlSession.builder());
    builder.addContactPoints(hosts);
    builder.withKeyspace(KEYSPACE_NAME);
    builder.withLocalDatacenter(datacenterName);
    CqlSession cqlSession = builder.build();
    _log.info("[OK] Connected to Keyspace {} ", KEYSPACE_NAME);
    return cqlSession;
}
/**
* create a dynamic column family to be added in a keyspace
*
* @param cfName the CF name
* @return the instance to be added to the keyspace
*/
private ColumnFamilyDefinition getDynamicCFDef(String cfName) {
ColumnFamilyDefinition columnFamilyDefinition = new ThriftColumnFamilyDefinitionImpl();
columnFamilyDefinition.setName(cfName);
columnFamilyDefinition.setKeyValidationClass("UTF8Type");
columnFamilyDefinition.setComparatorType("UTF8Type");
return columnFamilyDefinition;
/**
 * Closes the given session, if any.
 *
 * Fix: the original logged "[OK]Session is now closed" unconditionally,
 * even when {@code session} was null and nothing had been closed.
 *
 * @param session session to close; may be null (no-op)
 */
public static void closeSession(CqlSession session) {
    if (session == null) {
        return; // nothing to close, nothing to log
    }
    session.close();
    _log.info("[OK]Session is now closed");
}
/**
* create a static column family to be added in a keyspace with possibility to add a secondary index for a given column
*
* @param cfName the CF name
* @param secondaryIndexedField the column name of the column to index
* @return the instance to be added to the keyspace
*/
private ColumnFamilyDefinition getStaticCFDef(String cfName, String secondaryIndexedField) {
ColumnFamilyDefinition columnFamilyDefinition = new ThriftColumnFamilyDefinitionImpl();
columnFamilyDefinition.setName(cfName);
columnFamilyDefinition.setKeyValidationClass("UTF8Type");
columnFamilyDefinition.setComparatorType("UTF8Type");
//Add secondary index for userid
ColumnDefinition typeCDef = new ThriftColumnDefinitionImpl();
typeCDef.setName(secondaryIndexedField)
.setValidationClass("UTF8Type");
typeCDef.setIndex(secondaryIndexedField+"_"+UUID.randomUUID().toString().substring(0,5), "KEYS");
columnFamilyDefinition.addColumnDefinition(typeCDef);
return columnFamilyDefinition;
/**
 * Closes the cached keyspace session unless it is already closed.
 * Any failure is logged and swallowed (best-effort shutdown).
 */
public void closeConnection() {
    if (myKeyspaceSession.isClosed()) {
        return; // already closed — nothing to do
    }
    try {
        _log.info("Closing connection");
        closeSession(myKeyspaceSession);
        _log.info("Connection closed!");
    } catch (Exception e) {
        _log.error("Unable to close connection", e);
    }
}
/**
 * Applies the shared driver configuration to a session builder: raises the
 * schema-metadata, connection-init and request timeouts to 240 s.
 *
 * @param cqlSessionBuilder builder to configure
 * @return the same builder with the programmatic config loader attached
 */
private static CqlSessionBuilder configBuilder(CqlSessionBuilder cqlSessionBuilder){
return cqlSessionBuilder
.withConfigLoader(DriverConfigLoader.programmaticBuilder()
// Resolves the timeout query 'SELECT * FROM system_schema.tables' timed out after PT2S
.withDuration(DefaultDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT, Duration.ofMillis(240000))
.withDuration(DefaultDriverOption.CONNECTION_INIT_QUERY_TIMEOUT, Duration.ofMillis(240000))
.withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofMillis(240000))
.build());
}
/**
 * Creates the keyspace (if absent) with SimpleStrategy and the given
 * replication factor, using a short-lived cluster-level session.
 *
 * Fix: removed the explicit closeSession(cqlSession) call — the session is
 * already closed by try-with-resources, so it was being closed twice.
 *
 * @param keyspaceName      name of the keyspace to create
 * @param replicationFactor SimpleStrategy replication factor
 */
private static void createKeyspace(String keyspaceName, int replicationFactor) {
    try (CqlSession cqlSession = configBuilder(CqlSession.builder())
            .addContactPoints(hosts)
            .withLocalDatacenter(datacenterName)
            .build()) {
        cqlSession.execute(SchemaBuilder.createKeyspace(keyspaceName)
                .ifNotExists()
                .withSimpleStrategy(replicationFactor)
                .withDurableWrites(true)
                .build());
        _log.info("+ Keyspace '{}' created.", keyspaceName);
    }
}
/**
 * Drops the configured keyspace if it exists, using a short-lived
 * cluster-level session.
 *
 * Fix: removed the explicit closeSession(cqlSession) call — the session is
 * already closed by try-with-resources, so it was being closed twice.
 *
 * @return the {@link ResultSet} of the DROP KEYSPACE statement; callers
 *         check {@code wasApplied()} for the outcome
 */
private static ResultSet dropKeyspace() {
    ResultSet toreturn;
    try (CqlSession cqlSession = configBuilder(CqlSession.builder())
            .addContactPoints(hosts)
            .withLocalDatacenter(datacenterName)
            .build()) {
        toreturn = cqlSession.execute(SchemaBuilder.dropKeyspace(keyspaceName).ifExists().build());
        _log.info("Keyspace {} dropped.", keyspaceName);
    }
    return toreturn;
}
/**
 * Creates every column-family table in the configured keyspace, using a
 * dedicated keyspace-bound session for the batch of DDL statements.
 *
 * Fix: removed the explicit closeSession(cqlSession) call — the session is
 * already closed by try-with-resources, so it was being closed twice.
 *
 * NOTE(review): the per-table builders use withCompactStorage(); COMPACT
 * STORAGE was removed in Cassandra 4.x — confirm the target cluster
 * actually accepts these statements.
 */
private void createTables() {
    try (CqlSession cqlSession = configBuilder(CqlSession.builder())
            .addContactPoints(hosts)
            .withLocalDatacenter(datacenterName)
            .withKeyspace(keyspaceName)
            .build()) {
        createTableUSERNotificationsPreferences(cqlSession);
        createTableUSERNotifications(cqlSession);
        createTableVRETimeline(cqlSession);
        createTableAppTimeline(cqlSession);
        createTableUSERTimeline(cqlSession);
        createTableHashtaggedPosts(cqlSession);
        createTableHashtaggedComments(cqlSession);
        createTableHashtagsCounter(cqlSession);
        createTableUSERNotificationsUnread(cqlSession);
        createTableUSERLikes(cqlSession);
        createTableVREInvites(cqlSession);
        createTableEMAILInvites(cqlSession);
        createTableAttachments(cqlSession);
        createTableInvites(cqlSession);
        createTableLikes(cqlSession);
        createTableComments(cqlSession);
        createTableNotifications(cqlSession);
        createTablePosts(cqlSession);
    }
}
/**
 * Creates the "UserNotificationsPreferences" table: per-(userid, type)
 * notification preference.
 *
 * NOTE(review): the two withPartitionKey calls make (userid, type) a
 * composite PARTITION key; if callers fetch all preferences of one user,
 * "type" should likely be a clustering column — confirm against query code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableUSERNotificationsPreferences(CqlSession cqlSession) {
    cqlSession.execute(SchemaBuilder.createTable("UserNotificationsPreferences")
            .ifNotExists()
            .withPartitionKey("userid", DataTypes.TEXT)
            .withPartitionKey("type", DataTypes.TEXT)
            .withColumn("preference", DataTypes.TEXT)
            .withCompactStorage()
            .build());
    // Fix: log the actual table name; the old message said
    // "USERNotificationsPreferences", which does not match the DDL above.
    _log.info("+ Table '{}' has been created (if needed).", "UserNotificationsPreferences");
}
/**
 * Creates the "UserNotifications" table: one row per (userid, timestamp)
 * pointing at a notification id.
 *
 * NOTE(review): the two withPartitionKey calls make (userid, timestamp) a
 * composite PARTITION key; if callers page a user's notifications by time,
 * "timestamp" should likely be a clustering column — confirm against query
 * code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableUSERNotifications(CqlSession cqlSession) {
    cqlSession.execute(SchemaBuilder.createTable("UserNotifications")
            .ifNotExists()
            .withPartitionKey("userid", DataTypes.TEXT)
            .withPartitionKey("timestamp", DataTypes.TIMESTAMP)
            .withColumn("notid", DataTypes.UUID)
            .withCompactStorage()
            .build());
    // Fix: log the actual table name; the old message said
    // "USERNotifications", which does not match the DDL above.
    _log.info("+ Table '{}' has been created (if needed).", "UserNotifications");
}
/**
 * Creates the "VRETimeline" table: one row per (vreid, timestamp) pointing
 * at a post id.
 *
 * NOTE(review): the two withPartitionKey calls make (vreid, timestamp) a
 * composite PARTITION key; if callers page a VRE's timeline by time,
 * "timestamp" should likely be a clustering column — confirm against query
 * code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableVRETimeline(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("VRETimeline")
.ifNotExists()
.withPartitionKey("vreid", DataTypes.TEXT)
.withPartitionKey("timestamp", DataTypes.TIMESTAMP)
.withColumn("postid", DataTypes.UUID)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "VRETimeline");
}
/**
 * Creates the "AppTimeline" table: one row per (appid, timestamp) pointing
 * at a post id.
 *
 * NOTE(review): (appid, timestamp) is a composite PARTITION key here; if
 * callers page an app's timeline by time, "timestamp" should likely be a
 * clustering column — confirm against query code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableAppTimeline(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("AppTimeline")
.ifNotExists()
.withPartitionKey("appid", DataTypes.TEXT)
.withPartitionKey("timestamp", DataTypes.TIMESTAMP)
.withColumn("postid", DataTypes.UUID)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "AppTimeline");
}
/**
 * Creates the "UserTimeline" table: one row per (userid, timestamp)
 * pointing at a post id.
 *
 * NOTE(review): (userid, timestamp) is a composite PARTITION key here; if
 * callers page a user's timeline by time, "timestamp" should likely be a
 * clustering column — confirm against query code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableUSERTimeline(CqlSession cqlSession) {
    cqlSession.execute(SchemaBuilder.createTable("UserTimeline")
            .ifNotExists()
            .withPartitionKey("userid", DataTypes.TEXT)
            .withPartitionKey("timestamp", DataTypes.TIMESTAMP)
            .withColumn("postid", DataTypes.UUID)
            .withCompactStorage()
            .build());
    // Fix: log the actual table name; the old message said "USERTimeline",
    // which does not match the DDL above.
    _log.info("+ Table '{}' has been created (if needed).", "UserTimeline");
}
/**
 * Creates the "HashtaggedPosts" table: one row per (hashtag, postid) with
 * the VRE the post belongs to.
 *
 * NOTE(review): (hashtag, postid) is a composite PARTITION key; if callers
 * list all posts of one hashtag, "postid" should likely be a clustering
 * column — confirm against query code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableHashtaggedPosts(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("HashtaggedPosts")
.ifNotExists()
.withPartitionKey("hashtag", DataTypes.TEXT)
.withPartitionKey("postid", DataTypes.UUID)
.withColumn("vreid", DataTypes.TEXT)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "HashtaggedPosts");
}
/**
 * Creates the "HashtaggedComments" table: one row per (hashtag, commentid)
 * with the VRE the comment belongs to.
 *
 * NOTE(review): (hashtag, commentid) is a composite PARTITION key; if
 * callers list all comments of one hashtag, "commentid" should likely be a
 * clustering column — confirm against query code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableHashtaggedComments(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("HashtaggedComments")
.ifNotExists()
.withPartitionKey("hashtag", DataTypes.TEXT)
.withPartitionKey("commentid", DataTypes.UUID)
.withColumn("vreid", DataTypes.TEXT)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "HashtaggedComments");
}
/**
 * Creates the "HashtagsCounter" table: per-(vreid, hashtag) usage count.
 *
 * NOTE(review): (vreid, hashtag) is a composite PARTITION key; if callers
 * enumerate all hashtags of one VRE, "hashtag" should likely be a
 * clustering column — confirm against query code. Also "count" is a plain
 * BIGINT, not a Cassandra counter column — confirm increments are done
 * read-modify-write on purpose.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableHashtagsCounter(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("HashtagsCounter")
.ifNotExists()
.withPartitionKey("vreid", DataTypes.TEXT)
.withPartitionKey("hashtag", DataTypes.TEXT)
.withColumn("count", DataTypes.BIGINT)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "HashtagsCounter");
}
/**
 * Creates the "UserUnreadNotifications" table: one row per
 * (userid, timestamp) pointing at an unread notification id.
 *
 * NOTE(review): (userid, timestamp) is a composite PARTITION key here; if
 * callers page a user's unread notifications by time, "timestamp" should
 * likely be a clustering column — confirm against query code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableUSERNotificationsUnread(CqlSession cqlSession) {
    cqlSession.execute(SchemaBuilder.createTable("UserUnreadNotifications")
            .ifNotExists()
            .withPartitionKey("userid", DataTypes.TEXT)
            .withPartitionKey("timestamp", DataTypes.TIMESTAMP)
            .withColumn("notid", DataTypes.UUID)
            .withCompactStorage()
            .build());
    // Fix: log the actual table name; the old message said
    // "USERNotificationsUnread" but the DDL creates "UserUnreadNotifications".
    _log.info("+ Table '{}' has been created (if needed).", "UserUnreadNotifications");
}
/**
 * Creates the "UserLikes" table: one row per (userid, likeid) pointing at
 * the liked post.
 *
 * NOTE(review): (userid, likeid) is a composite PARTITION key; if callers
 * list all likes of one user, "likeid" should likely be a clustering
 * column — confirm against query code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableUSERLikes(CqlSession cqlSession) {
    cqlSession.execute(SchemaBuilder.createTable("UserLikes")
            .ifNotExists()
            .withPartitionKey("userid", DataTypes.TEXT)
            .withPartitionKey("likeid", DataTypes.UUID)
            .withColumn("postid", DataTypes.UUID)
            .withCompactStorage()
            .build());
    // Fix: log the actual table name; the old message said "USERLikes",
    // which does not match the DDL above.
    _log.info("+ Table '{}' has been created (if needed).", "UserLikes");
}
/**
 * Creates the "VREInvites" table: per-(vreid, inviteid) invite status.
 *
 * NOTE(review): (vreid, inviteid) is a composite PARTITION key; if callers
 * list all invites of one VRE, "inviteid" should likely be a clustering
 * column — confirm against query code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableVREInvites(CqlSession cqlSession) {
cqlSession.execute(SchemaBuilder.createTable("VREInvites")
.ifNotExists()
.withPartitionKey("vreid", DataTypes.TEXT)
.withPartitionKey("inviteid", DataTypes.UUID)
.withColumn("status", DataTypes.TEXT)
.withCompactStorage()
.build());
_log.info("+ Table '{}' has been created (if needed).", "VREInvites");
}
/**
 * Creates the "EmailInvites" table: per-(email, vreid) invite id.
 *
 * NOTE(review): (email, vreid) is a composite PARTITION key; if callers
 * look up all invites for one email, "vreid" should likely be a clustering
 * column — confirm against query code.
 *
 * @param cqlSession open session bound to the target keyspace
 */
private void createTableEMAILInvites(CqlSession cqlSession) {
    cqlSession.execute(SchemaBuilder.createTable("EmailInvites")
            .ifNotExists()
            .withPartitionKey("email", DataTypes.TEXT)
            .withPartitionKey("vreid", DataTypes.TEXT)
            .withColumn("inviteid", DataTypes.UUID)
            .withCompactStorage()
            .build());
    // Fix: log the actual table name; the old message said "EMAILInvites",
    // which does not match the DDL above.
    _log.info("+ Table '{}' has been created (if needed).", "EmailInvites");
}
/**
 * Creates the "Attachments" table if it does not already exist, keyed by the
 * attachment UUID, together with the secondary index "post_attach" on postid so
 * that all attachments of a post can be looked up.
 */
private void createTableAttachments(CqlSession cqlSession) {
    final String tableName = "Attachments";
    cqlSession.execute(SchemaBuilder.createTable(tableName)
            .ifNotExists()
            .withPartitionKey("attachid", DataTypes.UUID)
            .withColumn("postid", DataTypes.UUID)
            .withColumn("uri", DataTypes.TEXT)
            .withColumn("name", DataTypes.TEXT)
            .withColumn("description", DataTypes.TEXT)
            .withColumn("urithumbnail", DataTypes.TEXT)
            .withColumn("mimetype", DataTypes.TEXT)
            .withCompactStorage()
            .build());
    // Index enabling lookup of all attachments belonging to a given post.
    cqlSession.execute(SchemaBuilder.createIndex("post_attach")
            .ifNotExists()
            .onTable(tableName)
            .andColumn("postid")
            .build());
    _log.info("+ Table '{}' has been created (if needed).", "Attachments");
}
/**
 * Creates the "Invites" table if it does not already exist, keyed by the invite
 * UUID, together with the secondary index "sender" on senderuserid so that all
 * invites issued by a user can be looked up.
 */
private void createTableInvites(CqlSession cqlSession) {
    final String tableName = "Invites";
    cqlSession.execute(SchemaBuilder.createTable(tableName)
            .ifNotExists()
            .withPartitionKey("inviteid", DataTypes.UUID)
            .withColumn("senderuserid", DataTypes.TEXT)
            .withColumn("vreid", DataTypes.TEXT)
            .withColumn("email", DataTypes.TEXT)
            .withColumn("controlcode", DataTypes.TEXT)
            .withColumn("status", DataTypes.TEXT)
            .withColumn("timestamp", DataTypes.TIMESTAMP)
            .withColumn("senderfullname", DataTypes.TEXT)
            .withCompactStorage()
            .build());
    // Index enabling lookup of all invites sent by a given user.
    cqlSession.execute(SchemaBuilder.createIndex("sender")
            .ifNotExists()
            .onTable(tableName)
            .andColumn("senderuserid")
            .build());
    _log.info("+ Table '{}' has been created (if needed).", tableName);
}
/**
 * Creates the "Likes" table if it does not already exist, keyed by the like
 * UUID, together with the secondary index "post_likes" on postid so that all
 * likes of a post can be looked up.
 */
private void createTableLikes(CqlSession cqlSession) {
    final String tableName = "Likes";
    cqlSession.execute(SchemaBuilder.createTable(tableName)
            .ifNotExists()
            .withPartitionKey("likeid", DataTypes.UUID)
            .withColumn("userid", DataTypes.TEXT)
            .withColumn("fullname", DataTypes.TEXT)
            .withColumn("thumbnailurl", DataTypes.TEXT)
            .withColumn("postid", DataTypes.UUID)
            .withColumn("timestamp", DataTypes.TIMESTAMP)
            .withCompactStorage()
            .build());
    // Index enabling lookup of all likes attached to a given post.
    cqlSession.execute(SchemaBuilder.createIndex("post_likes")
            .ifNotExists()
            .onTable(tableName)
            .andColumn("postid")
            .build());
    _log.info("+ Table '{}' has been created (if needed).", tableName);
}
/**
 * Creates the "Comments" table if it does not already exist, keyed by the
 * comment UUID, together with the secondary index "post_comments" on postid so
 * that all comments of a post can be looked up.
 */
private void createTableComments(CqlSession cqlSession) {
    final String tableName = "Comments";
    cqlSession.execute(SchemaBuilder.createTable(tableName)
            .ifNotExists()
            .withPartitionKey("commentid", DataTypes.UUID)
            .withColumn("userid", DataTypes.TEXT)
            .withColumn("fullname", DataTypes.TEXT)
            .withColumn("thumbnailurl", DataTypes.TEXT)
            .withColumn("comment", DataTypes.TEXT)
            .withColumn("postid", DataTypes.UUID)
            .withColumn("timestamp", DataTypes.TIMESTAMP)
            .withColumn("isedit", DataTypes.BOOLEAN)
            .withColumn("lastedittime", DataTypes.TIMESTAMP)
            .withCompactStorage()
            .build());
    // Index enabling lookup of all comments attached to a given post.
    cqlSession.execute(SchemaBuilder.createIndex("post_comments")
            .ifNotExists()
            .onTable(tableName)
            .andColumn("postid")
            .build());
    _log.info("+ Table '{}' has been created (if needed).", tableName);
}
/**
 * Creates the "Notifications" table if it does not already exist, keyed by the
 * notification UUID, together with the secondary index "not_type" on type so
 * that notifications can be filtered by their type.
 */
private void createTableNotifications(CqlSession cqlSession) {
    final String tableName = "Notifications";
    cqlSession.execute(SchemaBuilder.createTable(tableName)
            .ifNotExists()
            .withPartitionKey("notid", DataTypes.UUID)
            .withColumn("type", DataTypes.TEXT)
            .withColumn("userid", DataTypes.TEXT)
            .withColumn("subjectid", DataTypes.TEXT)
            .withColumn("timestamp", DataTypes.TIMESTAMP)
            .withColumn("description", DataTypes.TEXT)
            .withColumn("uri", DataTypes.TEXT)
            .withColumn("senderid", DataTypes.TEXT)
            .withColumn("senderfullname", DataTypes.TEXT)
            .withColumn("senderthumbnailurl", DataTypes.TEXT)
            .withColumn("isread", DataTypes.BOOLEAN)
            .withCompactStorage()
            .build());
    // Index enabling retrieval of notifications by their type.
    cqlSession.execute(SchemaBuilder.createIndex("not_type")
            .ifNotExists()
            .onTable(tableName)
            .andColumn("type")
            .build());
    _log.info("+ Table '{}' has been created (if needed).", tableName);
}
/**
 * Creates the "Posts" table if it does not already exist, keyed by the post
 * UUID, together with the secondary index "posts_privacy" on privacy so that
 * posts can be filtered by their privacy level.
 */
private void createTablePosts(CqlSession cqlSession) {
    final String tableName = "Posts";
    cqlSession.execute(SchemaBuilder.createTable(tableName)
            .ifNotExists()
            .withPartitionKey("postid", DataTypes.UUID)
            .withColumn("linkhost", DataTypes.TEXT)
            .withColumn("description", DataTypes.TEXT)
            .withColumn("email", DataTypes.TEXT)
            .withColumn("likesno", DataTypes.BIGINT)
            .withColumn("thumbnailurl", DataTypes.TEXT)
            .withColumn("linkdescription", DataTypes.TEXT)
            .withColumn("timestamp", DataTypes.TIMESTAMP)
            .withColumn("uri", DataTypes.TEXT)
            .withColumn("isapplicationpost", DataTypes.BOOLEAN)
            .withColumn("entityid", DataTypes.TEXT)
            .withColumn("privacy", DataTypes.TEXT)
            .withColumn("type", DataTypes.TEXT)
            .withColumn("urithumbnail", DataTypes.TEXT)
            .withColumn("vreid", DataTypes.TEXT)
            .withColumn("multifileupload", DataTypes.BOOLEAN)
            .withColumn("fullname", DataTypes.TEXT)
            .withColumn("commentsno", DataTypes.BIGINT)
            .withColumn("linktitle", DataTypes.TEXT)
            .withCompactStorage()
            .build());
    // Index enabling retrieval of posts by their privacy level.
    cqlSession.execute(SchemaBuilder.createIndex("posts_privacy")
            .ifNotExists()
            .onTable(tableName)
            .andColumn("privacy")
            .build());
    _log.info("+ Table '{}' has been created (if needed).", tableName);
}
}

View File

@ -5,10 +5,6 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.ImmutableMap;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.serializers.StringSerializer;
public class DatabookCassandraTest {
private static DBCassandraAstyanaxImpl store;

View File

@ -30,8 +30,6 @@ import org.gcube.portal.databook.shared.ex.NotificationIDNotFoundException;
import org.gcube.portal.databook.shared.ex.NotificationTypeNotFoundException;
import org.gcube.portal.databook.shared.ex.PrivacyLevelTypeNotFoundException;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
/**
* @author Massimiliano Assante ISTI-CNR
* @author Costantino Perciante ISTI-CNR
@ -304,14 +302,13 @@ public interface DatabookStore {
*/
RangeFeeds getRecentFeedsByVREAndRange(String vreid, int from, int quantity) throws PrivacyLevelTypeNotFoundException, FeedTypeNotFoundException, ColumnNameNotFoundException, FeedIDNotFoundException;
/**
* return the most recent posts for this vre up to quantity param and the last index of the feeds in the timeline
* lastReturnedFeedTimelineIndex is useful to know from where to start the range the second time you ask
* because there are deletions
* return the most recent posts for this vre up to quantity param and the last index of the posts in the timeline
* lastReturnedPostTimelineIndex is useful to know from where to start the range the next time you ask, because there are deletions
*
* @param vreid VRES identifier
* @param from the range start (most recent feeds for this vre) has to be greater than 0
* @param quantity the number of most recent feeds for this vre starting from "from" param
* @return a <class>lastReturnedFeedTimelineIndex</class> containing of most recent feeds for this vre
* @return a <class>RangePosts</class> containing the most recent posts for this vre
* @throws FeedTypeNotFoundException
* @throws PrivacyLevelTypeNotFoundException
* @throws ColumnNameNotFoundException
@ -657,12 +654,16 @@ public interface DatabookStore {
*/
List<Attachment> getAttachmentsByFeedId(String feedId) throws FeedIDNotFoundException;
/**
* save an attachment
*/
boolean saveAttachmentEntry(String feedId, Attachment toSave);
/**
* Retrieve all the ids of the vre
* @return the set of ids of the vre available or empty list in case of errors.
* @throws ConnectionException
*/
public List<String> getAllVREIds() throws ConnectionException;
public List<String> getAllVREIds();
/**
* close the connection to the underlying database

View File

@ -7,9 +7,14 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.Serializable;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.metadata.Metadata;
import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata;
import org.gcube.common.portal.GCubePortalConstants;
import org.gcube.common.portal.PortalContext;
import org.gcube.common.resources.gcore.ServiceEndpoint;
@ -22,8 +27,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Massimiliano Assante ISTI-CNR
* @author Ahmed Ibrahim ISTI-CNR
*
* @version 0.1 Dec 2012
* @version 2.0.0 October 2023
*
*/
@SuppressWarnings("serial")
@ -38,12 +44,12 @@ public class RunningCluster implements Serializable {
*/
private static final String HOST_PROPERTY = "host";
private static final String HOST_PORT_PROPERTY = "port";
private static final String CLUSTER_NAME_PROPERTY = "cluster";
private static final String DATACENTER_NAME_PROPERTY = "datacenter";
private static final String KEY_SPACE_NAME_PROPERTY = "keyspace";
/**
* other constants
*/
private final static String RUNTIME_RESOURCE_NAME = "SocialPortalDataStore";
private final static String RUNTIME_RESOURCE_NAME = "SocialDB";
private final static String PLATFORM_NAME = "Cassandra";
private static final String DEFAULT_CONFIGURATION = "/org/gcube/portal/databook/server/resources/databook.properties";
@ -56,17 +62,17 @@ public class RunningCluster implements Serializable {
/**
* Cluster Name
*/
private String clusterName;
private String datacenterName;
/**
* Keyspace Name
*/
private String keyspaceName;
private String keyspaceName; //to be modified
/**
* @param infrastructureName could be null
* @return an instance of the RunningCluster
*/
public static synchronized RunningCluster getInstance(String infrastructureName) {
public static synchronized RunningCluster getInstance(String infrastructureName){
if (singleton == null) {
singleton = new RunningCluster(infrastructureName);
}
@ -75,8 +81,9 @@ public class RunningCluster implements Serializable {
/**
* private constructor
*/
private RunningCluster(String infrastructureName) {
try {
private RunningCluster(String infrastructureName){
//Query the IS (for the future)
try{
List<ServiceEndpoint> resources = getConfigurationFromIS(infrastructureName);
if (resources.size() > 1) {
_log.error("Too many Runtime Resource having name " + RUNTIME_RESOURCE_NAME +" in this scope ");
@ -91,20 +98,25 @@ public class RunningCluster implements Serializable {
for (ServiceEndpoint res : resources) {
AccessPoint found = res.profile().accessPoints().iterator().next();
host = found.address();
clusterName = found.description();
datacenterName = found.description();
keyspaceName = found.name();
}
}
} catch (Exception e) {
e.printStackTrace();
}
/*host = "10.1.28.55:9042, 10.1.30.142:9042, 10.1.28.100:9042";
datacenterName = "1";
keyspaceName = "dev_mig_consistent";*/
}
/**
*
* @return the list of matching ServiceEndpoint resources found in the IS
* @throws Exception
*/
private List<ServiceEndpoint> getConfigurationFromIS(String infrastructureName) throws Exception {
private List<ServiceEndpoint> getConfigurationFromIS(String infrastructureName) {
_log.debug("getConfigurationFromIS infrastructureName="+infrastructureName );
String scope = "/";
if(infrastructureName != null && !infrastructureName.isEmpty())
@ -125,13 +137,14 @@ public class RunningCluster implements Serializable {
}
private String readInfrastructureName() {
Properties props = new Properties();
try {
StringBuilder sb = new StringBuilder(getCatalinaHome());
sb.append(File.separator)
.append(PortalContext.CONFIGURATION_FOLDER)
.append(File.separator)
.append(PortalContext.INFRA_PROPERTY_FILENAME);
.append(PortalContext.CONFIGURATION_FOLDER)
.append(File.separator)
.append(PortalContext.INFRA_PROPERTY_FILENAME);
String propertyfile = sb.toString();
File propsFile = new File(propertyfile);
FileInputStream fis = new FileInputStream(propsFile);
@ -153,7 +166,7 @@ public class RunningCluster implements Serializable {
try {
props.load(CassandraClusterConnection.class.getResourceAsStream(DEFAULT_CONFIGURATION));
host = props.getProperty(HOST_PROPERTY) + ":" + props.getProperty(HOST_PORT_PROPERTY);
clusterName = props.getProperty(CLUSTER_NAME_PROPERTY);
datacenterName = props.getProperty(DATACENTER_NAME_PROPERTY);
keyspaceName = props.getProperty(KEY_SPACE_NAME_PROPERTY);
} catch (IOException e) {
e.printStackTrace();
@ -169,25 +182,19 @@ public class RunningCluster implements Serializable {
this.host = host;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public String getKeyspaceName() {
return keyspaceName;
}
public void setKeyspaceName(String keyspaceName) {
this.keyspaceName = keyspaceName;
}
@Override
public String toString() {
return "RunningCluster [host=" + host + ", clusterName=" + clusterName
return "RunningCluster [host=" + host + ", datacenterName=" + datacenterName
+ ", keyspaceName=" + keyspaceName + "]";
}
/**
@ -197,4 +204,21 @@ public class RunningCluster implements Serializable {
private static String getCatalinaHome() {
return (System.getenv("CATALINA_HOME").endsWith("/") ? System.getenv("CATALINA_HOME") : System.getenv("CATALINA_HOME")+"/");
}
public void setDatacenterName(String datacenterName){
this.datacenterName = datacenterName;
}
public String getDatacenterName() {
return datacenterName;
}
public List<InetSocketAddress> getHosts() {
List<InetSocketAddress> hosts = new ArrayList<>();
String [] ips = host.split(", ");
for (String ip: ips){
String[] ip_port = ip.split(":");
hosts.add(new InetSocketAddress(ip_port[0], Integer.parseInt(ip_port[1])));
}
return hosts;
}
}

View File

@ -0,0 +1,76 @@
package org.gcube.portal.databook.server;
/**
* @author Massimiliano Assante ISTI-CNR
* @author Ahmed Ibrahim ISTI-CNR
*
* @version 2.0.0 October 2023
*
*/
public class Schema {
	// Table names.
	// NOTE(review): these literals must stay in sync with the createTable* calls
	// in the server implementation — they are string-matched, not type-checked.
	//Tables
	public static final String NOTIFICATIONS = "Notifications";
	public static final String POSTS = "Posts";
	public static final String COMMENTS = "Comments";
	public static final String LIKES = "Likes";
	public static final String INVITES = "Invites";
	public static final String VRE_TIMELINE_POSTS = "VRETimeline";
	public static final String USER_TIMELINE_POSTS = "UserTimeline";
	public static final String APP_TIMELINE_POSTS = "AppTimeline";
	public static final String USER_LIKED_POSTS = "UserLikes";
	public static final String USER_NOTIFICATIONS = "UserNotifications"; // regular user notifications timeline (both read and unread, messages are included)
	public static final String USER_NOTIFICATIONS_UNREAD = "UserUnreadNotifications"; // only unread user notifications/ notifications messages
	public static final String USER_NOTIFICATIONS_PREFERENCES = "UserNotificationsPreferences"; // preferences for notifications
	public static final String HASHTAGS_COUNTER = "HashtagsCounter"; // count the hashtags per group and type
	public static final String HASHTAGGED_POSTS = "HashtaggedPosts"; // contains hashtags per type associated with vre and POST
	public static final String HASHTAGGED_COMMENTS = "HashtaggedComments"; // contains hashtags per type associated with vre and comment
	public static final String VRE_INVITES = "VREInvites"; //contains the emails that were invited per VRE
	public static final String EMAIL_INVITES = "EmailInvites"; //contains the list of invitation per email
	public static final String ATTACHMENTS = "Attachments"; //contains the list of all the attachments in a POST
	// Column names; the trailing comment on each line records its CQL data type.
	//columns
	public static final String USER_ID = "userid"; //text
	public static final String TYPE = "type"; //text
	public static final String PREFERENCE = "preference"; //text
	public static final String TIMESTAMP = "timestamp"; //timestamp
	public static final String NOT_ID = "notid"; //UUID
	public static final String VRE_ID = "vreid"; //text
	public static final String POST_ID = "postid"; //UUID
	public static final String APP_ID = "appid"; //text
	public static final String HASHTAG = "hashtag"; //text
	public static final String COMMENT_ID = "commentid"; //UUID
	public static final String COUNT = "count"; //big int
	public static final String LIKE_ID = "likeid"; //UUID
	public static final String INVITE_ID = "inviteid"; //UUID
	public static final String STATUS = "status"; //text
	public static final String EMAIL = "email"; //text
	public static final String ATTACH_ID = "attachid"; //UUID
	public static final String URI = "uri"; //text
	public static final String NAME = "name"; //text
	public static final String DESCRIPTION = "description"; //text
	public static final String URI_THUMBNAIL = "urithumbnail"; //text
	public static final String MIME_TYPE = "mimetype"; //text
	public static final String SENDER_USER_ID = "senderuserid"; //text
	public static final String CONTROL_CODE = "controlcode"; //text
	public static final String SENDER_FULL_NAME = "senderfullname"; //text
	public static final String FULL_NAME = "fullname"; //text
	public static final String THUMBNAIL_URL = "thumbnailurl"; //text
	public static final String COMMENT = "comment"; //text
	public static final String IS_EDIT = "isedit"; //bool
	public static final String LAST_EDIT_TIME = "lastedittime"; //timestamp
	public static final String SUBJECT_ID = "subjectid"; //text
	public static final String SENDER_ID = "senderid"; //text
	public static final String SENDER_THUMBNAIL_URL = "senderthumbnailurl"; //text
	public static final String IS_READ = "isread"; //bool
	public static final String LINK_HOST = "linkhost"; //text
	public static final String LIKES_NO = "likesno"; //big int
	public static final String LINK_DESCRIPTION = "linkdescription"; //text
	public static final String IS_APPLICATION_POST = "isapplicationpost"; //bool
	public static final String ENTITY_ID = "entityid"; //text
	public static final String PRIVACY = "privacy"; //text
	public static final String MULTI_FILE_UPLOAD = "multifileupload"; //bool
	public static final String COMMENTS_NO = "commentsno"; //big int
	public static final String LINK_TITLE = "linktitle"; //text
}

View File

@ -0,0 +1,53 @@
package org.gcube.portal.databook.server;
import org.gcube.portal.databook.shared.*;
import org.gcube.portal.databook.shared.ex.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
* @author Massimiliano Assante ISTI-CNR
* @author Ahmed Ibrahim ISTI-CNR
*
* @version 2.0.0 October 2023
*
*/
/**
 * Manual smoke-test driver that reads a known post and its comments from the
 * Cassandra-backed store and prints them to stdout.
 */
public class Tester {
	private static DBCassandraAstyanaxImpl store;
	private static Logger LOGGER = LoggerFactory.getLogger(Tester.class);

	public Tester() {
		store = new DBCassandraAstyanaxImpl("gcube"); //set to true if you want to drop the KeySpace and recreate it
	}

	public static void main(String[] args) throws ColumnNameNotFoundException, PrivacyLevelTypeNotFoundException, FeedIDNotFoundException, FeedTypeNotFoundException {
		Tester tester = new Tester();
		//tester.getComment();
		tester.testFunc();
		System.exit(0);
	}

	/** Reads one hard-coded post plus all its comments and prints them. */
	public void testFunc() throws ColumnNameNotFoundException, PrivacyLevelTypeNotFoundException, FeedIDNotFoundException, FeedTypeNotFoundException {
		String postIdToUpdate = "047c601d-2291-4974-9224-d6732b1fbe26";
		Post post = store.readPost(postIdToUpdate);
		List<Comment> comments = store.getAllCommentByPost(postIdToUpdate);
		System.out.println(post);
		for (Comment comment : comments) {
			System.out.println(comment.getText());
		}
	}

	/** Reads one hard-coded comment by id and prints it. */
	public void getComment(){
		String uuid = "820969b2-4632-4197-9fd6-5aafab781faa";
		try {
			Comment comment = store.readCommentById(uuid);
			System.out.println(comment);
		} catch (CommentIDNotFoundException e) {
			// TODO Auto-generated catch block
			System.err.println(e.toString());
		}
	}
}

View File

@ -0,0 +1,56 @@
package org.gcube.portal.databook.shared;
import java.io.Serializable;
import java.util.List;
/**
 * Serializable holder pairing a {@link Post} with the list of {@link Attachment}s
 * that belong to it, so both can be shipped together.
 *
 * Fixes over the previous version: toString() used O(n^2) string concatenation
 * in a loop (now a StringBuilder), contained a stray empty statement, lacked
 * {@code @Override}, and — for an empty attachment list — the trailing-character
 * trim removed the space after "Attachments:" instead of a comma.
 */
@SuppressWarnings("serial")
public class PostWithAttachment implements Serializable {

	private Post post;
	private List<Attachment> attachments;

	/**
	 * @param post the post being wrapped
	 * @param attachments the attachments belonging to the post
	 */
	public PostWithAttachment(Post post, List<Attachment> attachments){
		super();
		this.post = post;
		this.attachments = attachments;
	}

	public void setAttachments(List<Attachment> attachments) {
		this.attachments = attachments;
	}

	public void setPost(Post post) {
		this.post = post;
	}

	public List<Attachment> getAttachments() {
		return attachments;
	}

	public Post getPost() {
		return post;
	}

	@Override
	public String toString() {
		String postInfo = "Post [key=" + post.getKey() + ", type=" + post.getType() + ", entityId=" + post.getEntityId()
				+ ", time=" + post.getTime() + ", vreid=" + post.getVreid() + ", uri=" + post.getUri()
				+ ", uriThumbnail=" + post.getUriThumbnail() + ", description="
				+ post.getDescription() + ", privacy=" + post.getPrivacy() + ", fullName="
				+ post.getFullName() + ", email=" + post.getEmail() + ", thumbnailURL="
				+ post.getThumbnailURL() + ", commentsNo=" + post.getCommentsNo() + ", likesNo="
				+ post.getLikesNo() + ", linkTitle=" + post.getLinkTitle() + ", linkDescription="
				+ post.getLinkDescription() + ", linkHost=" + post.getLinkHost()
				+ ", applicationFeed=" + post.isApplicationFeed()
				+ ", multiFileUpload=" + post.isMultiFileUpload() + "]";
		// Build the attachment list with a StringBuilder; commas are emitted
		// between entries so no trailing-character trimming is needed.
		StringBuilder attachmentInfo = new StringBuilder("[Attachments: ");
		for (int i = 0; i < attachments.size(); i++) {
			Attachment attachment = attachments.get(i);
			if (i > 0) {
				attachmentInfo.append(',');
			}
			attachmentInfo.append("[Attachment [key=").append(attachment.getId())
					.append(", uri=").append(attachment.getUri())
					.append(", name=").append(attachment.getName())
					.append(", description=").append(attachment.getDescription())
					.append(", thumbnailURL=").append(attachment.getThumbnailURL())
					.append(", mimeType=").append(attachment.getMimeType())
					.append("]]");
		}
		attachmentInfo.append(']');
		return "[" + postInfo + ", " + attachmentInfo + "]" ;
	}
}