Compare commits


No commits in common. "master" and "Feature/25901-Fix" have entirely different histories.

7 changed files with 67 additions and 118 deletions

View File

@@ -1,13 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
<wb-module deploy-name="social-library">
<wb-resource deploy-path="/" source-path="/src/main/java"/>
<wb-resource deploy-path="/" source-path="/src/test/java"/>
</wb-module>
</project-modules>

View File

@@ -4,14 +4,9 @@
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v2.0.1] - 2024-04-22
## [v2.0.0] - 2023-10-30
- Bug 27218 - Fixed a null pointer exception when getting notification preferences
- Feature 27286 - Removed noisy logs
## [v2.0.0] - 2023-12-04
- Support for Cassandra 4.1.3 using DataStax java driver
- Feature #25901-fix, same as feature 25901 but with minor fixes related to deprecated methods and classes
## [v1.17.0] - 2022-05-13

pom.xml (12 changed lines)
View File

@@ -5,13 +5,13 @@
<parent>
<artifactId>maven-parent</artifactId>
<groupId>org.gcube.tools</groupId>
<version>1.2.0</version>
<version>1.1.0</version>
<relativePath />
</parent>
<groupId>org.gcube.portal</groupId>
<artifactId>social-networking-library</artifactId>
<version>2.0.1</version>
<version>2.0.0-SNAPSHOT</version>
<name>gCube Social Networking Library</name>
<description>
The gCube Social Networking Library is the 'bridge' between your gCube Applications and the social networking facilities.
@@ -36,7 +36,7 @@
<dependency>
<groupId>org.gcube.distribution</groupId>
<artifactId>maven-portal-bom</artifactId>
<version>3.7.0</version>
<version>3.6.4</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@@ -47,19 +47,16 @@
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-query-builder</artifactId>
<version>${cassandra.driver.oss.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.datastax.oss</groupId>
<artifactId>java-driver-mapper-runtime</artifactId>
<version>${cassandra.driver.oss.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.google</groupId>
<artifactId>gwt-jsonmaker</artifactId>
<version>1.2.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.gcube.resources.discovery</groupId>
@@ -91,18 +88,15 @@
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
<scope>provided</scope>
</dependency>
</dependencies>
<build>

View File

@@ -41,7 +41,7 @@ public class CassandraClusterConnection {
* @param dropSchema set true if you want to drop the current and set up new one
* the connection to cassandra cluster
*/
protected CassandraClusterConnection(boolean dropSchema) throws Exception {
protected CassandraClusterConnection(boolean dropSchema) {
if (hosts == null || datacenterName == null || keyspaceName == null) {
RunningCluster cluster = RunningCluster.getInstance(null);
@@ -62,7 +62,7 @@ public class CassandraClusterConnection {
* @param dropSchema set true if you want to drop the current and set up new one
* the connection to cassandra cluster
*/
protected CassandraClusterConnection(boolean dropSchema, String infrastructureName) throws Exception {
protected CassandraClusterConnection(boolean dropSchema, String infrastructureName) {
if (hosts == null || datacenterName == null || keyspaceName == null) {
RunningCluster cluster = RunningCluster.getInstance(infrastructureName);
//host = cluster.getHost();
@@ -145,7 +145,7 @@ public class CassandraClusterConnection {
CqlSession cqlSession = configBuilder(CqlSession.builder())
.addContactPoints(hosts)
.withKeyspace(KEYSPACE_NAME)
.withLocalDatacenter(datacenterName)
.withLocalDatacenter("1")
.build();
_log.info("[OK] Connected to Keyspace {} ", KEYSPACE_NAME);
return cqlSession;
@@ -180,7 +180,7 @@ public class CassandraClusterConnection {
private static void createKeyspace(String keyspaceName, int replicationFactor) {
try (CqlSession cqlSession = configBuilder(CqlSession.builder())
.addContactPoints(hosts)
.withLocalDatacenter(datacenterName)
.withLocalDatacenter("1")
.build()) {
cqlSession.execute(SchemaBuilder.createKeyspace(keyspaceName)
.ifNotExists()
@@ -196,7 +196,7 @@ public class CassandraClusterConnection {
ResultSet toreturn;
try (CqlSession cqlSession = configBuilder(CqlSession.builder())
.addContactPoints(hosts)
.withLocalDatacenter(datacenterName)
.withLocalDatacenter("1")
.build()) {
toreturn = cqlSession.execute(SchemaBuilder.dropKeyspace(keyspaceName).ifExists().build());
_log.info("Keyspace {} dropped.", keyspaceName);
@@ -207,7 +207,7 @@ public class CassandraClusterConnection {
private void createTables(){
try (CqlSession cqlSession = configBuilder(CqlSession.builder())
.addContactPoints(hosts)
.withLocalDatacenter(datacenterName)
.withLocalDatacenter("1")
.withKeyspace(keyspaceName)
.build()) {
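Note: the hunks above differ only in the withLocalDatacenter(...) argument: the configured datacenterName field on one side, the literal "1" on the other. For context, a minimal sketch of how a DataStax 4.x CqlSession is opened with an explicit local datacenter (SessionSketch and open are illustrative names, not part of this repository). The value passed to withLocalDatacenter must match the datacenter name the contacted nodes advertise, so a hardcoded "1" only works on clusters configured that way.

    import java.net.InetSocketAddress;
    import java.util.Collection;

    import com.datastax.oss.driver.api.core.CqlSession;

    public class SessionSketch {
        // Opens a session pinned to one local datacenter and one keyspace,
        // mirroring the builder chain used in CassandraClusterConnection.
        static CqlSession open(Collection<InetSocketAddress> contactPoints,
                               String localDatacenter, String keyspace) {
            return CqlSession.builder()
                    .addContactPoints(contactPoints)
                    .withLocalDatacenter(localDatacenter) // "1" on one side of the hunks above
                    .withKeyspace(keyspace)
                    .build();
        }
    }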

View File

@@ -46,32 +46,20 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
* @param dropSchema set true if you want to drop the current and set up new one
*/
protected DBCassandraAstyanaxImpl(boolean dropSchema) {
try {
conn = new CassandraClusterConnection(dropSchema);
} catch (Exception e) {
throw new RuntimeException(e);
}
conn = new CassandraClusterConnection(dropSchema);
}
/**
* public constructor, no dropping schema is allowed
*/
public DBCassandraAstyanaxImpl() {
try {
conn = new CassandraClusterConnection(false);
} catch (Exception e) {
throw new RuntimeException(e);
}
conn = new CassandraClusterConnection(false);
}
/**
* public constructor, no dropping schema is allowed, infrastructureName is given.
*/
public DBCassandraAstyanaxImpl(String infrastructureName) {
try {
conn = new CassandraClusterConnection(false, infrastructureName);
} catch (Exception e) {
throw new RuntimeException(e);
}
conn = new CassandraClusterConnection(false, infrastructureName);
}
/*
@@ -748,7 +736,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
Boolean result = session.execute(writeBatch).wasApplied();
if (result){
_log.debug("Wrote user post with id " + post.getKey());
_log.info("Wrote user post with id " + post.getKey());
}
@@ -796,7 +784,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
Boolean result = session.execute(writeBatch).wasApplied();
if (result){
_log.debug("Wrote user post with id " + post.getKey());
_log.info("Wrote user post with id " + post.getKey());
}
@@ -831,7 +819,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
post.setMultiFileUpload(true);
boolean savePostResult = saveUserPost(post);
if (savePostResult) {
_log.debug("Post has been saved");
_log.info("Post has been saved");
String postkey = post.getKey();
for (Attachment attachment : attachments) {
boolean attachSaveResult = saveAttachmentEntry(postkey, attachment);
@@ -888,7 +876,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
boolean result = session.execute(writeBatch).wasApplied();
if (result)
_log.debug("Wrote app post with id " + post.getKey());
_log.info("Wrote app post with id " + post.getKey());
return result;
}
@@ -936,7 +924,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
boolean result = session.execute(writeBatch).wasApplied();
if (result)
_log.debug("Wrote app post with id " + post.getKey());
_log.info("Wrote app post with id " + post.getKey());
return result;
}
@@ -1169,7 +1157,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
return false;
}
_log.debug("Delete Post OK");
_log.info("Delete Post OK");
return true;
}
/**
@@ -1189,7 +1177,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
return false;
}
_log.debug("Delete Post OK");
_log.info("Delete Post OK");
return true;
}
/**
@@ -2188,34 +2176,20 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
*/
@Override
public List<NotificationChannelType> getUserNotificationChannels(String userid, NotificationType notificationType) throws NotificationChannelTypeNotFoundException, NotificationTypeNotFoundException {
_log.debug("Asking for Single Notification preference of " + userid + " Type: " + notificationType);
_log.trace("Asking for Single Notification preference of " + userid + " Type: " + notificationType);
List<NotificationChannelType> toReturn = new ArrayList<NotificationChannelType>();
Map<NotificationType, NotificationChannelType[]> userNotPref = getUserNotificationPreferences(userid);
if(userNotPref!=null){
if(userNotPref.containsKey(notificationType)){
NotificationChannelType[] toProcess = userNotPref.get(notificationType);
//_log.debug("size of user notification preferences" + toProcess.length);
if (toProcess == null) {
_log.debug("Single Notification preference of " + userid + " Type: " + notificationType + " not existing ... creating default");
return createNewNotificationType(userid, notificationType);
}
else if (toProcess.length == 0){
_log.debug("size of user notification preferences " + 0);
return toReturn;
}
else
{
_log.debug("size of user notification preferences " + toProcess.length);
for (int i = 0; i < toProcess.length; i++) {
toReturn.add(toProcess[i]);
}
return toReturn;
}
}
return toReturn;
NotificationChannelType[] toProcess = getUserNotificationPreferences(userid).get(notificationType);
if (toProcess == null) {
_log.warn("Single Notification preference of " + userid + " Type: " + notificationType + " not existing ... creating default");
return createNewNotificationType(userid, notificationType);
}
else if (toProcess.length == 0)
return toReturn;
else
for (int i = 0; i < toProcess.length; i++) {
toReturn.add(toProcess[i]);
}
return toReturn;
}
/**
* called when you add new notification types where the setting does not exist yet
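Note: the shorter variant of getUserNotificationChannels in the hunk above calls getUserNotificationPreferences(userid).get(notificationType) directly, dropping the userNotPref != null guard the longer variant keeps; the changelog earlier in this compare cites Bug 27218 for a null pointer exception around notification preferences. A hedged sketch (not the committed code) of a null-safe body that keeps the shorter flow, assuming it sits inside DBCassandraAstyanaxImpl with java.util.ArrayList, java.util.Collections, java.util.List and java.util.Map imported:

    @Override
    public List<NotificationChannelType> getUserNotificationChannels(String userid, NotificationType notificationType)
            throws NotificationChannelTypeNotFoundException, NotificationTypeNotFoundException {
        List<NotificationChannelType> toReturn = new ArrayList<NotificationChannelType>();
        Map<NotificationType, NotificationChannelType[]> userNotPref = getUserNotificationPreferences(userid);
        NotificationChannelType[] toProcess = (userNotPref != null) ? userNotPref.get(notificationType) : null;
        if (toProcess == null) {
            // no stored preference for this type: create and return the defaults
            return createNewNotificationType(userid, notificationType);
        }
        Collections.addAll(toReturn, toProcess); // an empty array simply yields an empty list
        return toReturn;
    }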
@@ -2223,7 +2197,6 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
*/
private List<NotificationChannelType> createNewNotificationType(String userid, NotificationType notificationType) {
List<NotificationChannelType> toReturn = new ArrayList<NotificationChannelType>();
_log.debug("Create new notification type");
CqlSession session = conn.getKeyspaceSession();
String valueToInsert = "";
@@ -2249,11 +2222,9 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
}
if (res) {
_log.debug("Set New Notification Setting for " + userid + " OK");
_log.debug("toreturn:" + toReturn.toString());
_log.trace("Set New Notification Setting for " + userid + " OK");
return toReturn;
}
_log.debug("empty list");
return new ArrayList<NotificationChannelType>(); //no notification if sth fails
}
/**
@@ -2266,10 +2237,10 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
for (NotificationType nType : enabledChannels.keySet()) {
String valueToInsert = "";
_log.debug("Type: " + nType.toString());
_log.info("Type: " + nType.toString());
int channelsNo = (enabledChannels.get(nType) != null) ? enabledChannels.get(nType).length : 0;
for (int i = 0; i < channelsNo; i++) {
_log.debug(enabledChannels.get(nType)[i].toString());
_log.info(enabledChannels.get(nType)[i].toString());
valueToInsert += NotificationChannelType.valueOf(enabledChannels.get(nType)[i].toString());
if (i < channelsNo-1)
valueToInsert += ",";
@@ -2285,9 +2256,9 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
BatchStatement writeBatch = getBatch().addAll(boundStatements);
boolean overAllresult = session.execute(writeBatch).wasApplied();
if (overAllresult)
_log.debug("Set Notification Map for " + userid + " OK");
_log.info("Set Notification Map for " + userid + " OK");
else
_log.debug("Set Notification Map for " + userid + " FAILED");
_log.info("Set Notification Map for " + userid + " FAILED");
return overAllresult;
}
/**
@@ -2297,7 +2268,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
*/
@Override
public Map<NotificationType, NotificationChannelType[]> getUserNotificationPreferences(String userid) throws NotificationTypeNotFoundException, NotificationChannelTypeNotFoundException {
_log.debug("Asking for Notification preferences of " + userid);
_log.trace("Asking for Notification preferences of " + userid);
Map<NotificationType, NotificationChannelType[]> toReturn = new HashMap<NotificationType, NotificationChannelType[]>();
ResultSet result = null;
@@ -2314,13 +2285,11 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
e.printStackTrace();
}
//if there are no settings for this user create an entry and put all of them at true
List<Row> results = new ArrayList<>();
if(result!=null) results = result.all();
//_log.debug("Result set empty? " + results.isEmpty());
List<Row> results = result.all();
if (results.isEmpty()) {
_log.debug("Userid " + userid + " settings not found, initiating its preferences...");
_log.info("Userid " + userid + " settings not found, initiating its preferences...");
HashMap<NotificationType, NotificationChannelType[]> toCreate = new HashMap<NotificationType, NotificationChannelType[]>();
for (int i = 0; i < NotificationType.values().length; i++) {
@@ -2338,12 +2307,10 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
return toCreate;
}
else {
_log.debug("Notification preferences Found for " + userid + " : " + results.size()) ;
_log.trace("Notification preferences Found for " + userid);
for (Row row: results){
String[] channels = row.getString(PREFERENCE).split(",");
//_log.debug("Row : " + row.getString(PREFERENCE));
if (channels != null && channels.length == 1 && channels[0].toString().equals("") ) { //it is empty, preference is set to no notification at all
//_log.debug("adding CHANNELS NULL: " + getNotificationType(row.getString(TYPE)) + ", " + new NotificationChannelType[0]);
toReturn.put(getNotificationType(row.getString(TYPE)), new NotificationChannelType[0]);
} else {
NotificationChannelType[] toAdd = new NotificationChannelType[channels.length];
@@ -2352,12 +2319,10 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
toAdd[i] = (getChannelType(channels[i]));
}
}
//_log.debug("adding channels not null: " + getNotificationType(row.getString(TYPE)) + ", " + toAdd.toString());
toReturn.put(getNotificationType(row.getString(TYPE)), toAdd);
}
}
}
_log.debug("Returning:"+toReturn.size());
return toReturn;
}
/*
@@ -2432,7 +2397,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
e.printStackTrace();
return false;
}
_log.debug("Writing comment : {}", comment.toString());
_log.info("Writing comment : {}", comment.toString());
CqlSession session = conn.getKeyspaceSession();
List<BoundStatement> boundStatements = insertIntoComments(session, comment);
@@ -2443,10 +2408,10 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
try {
ResultSet res = session.execute(writeBatch);
for (ExecutionInfo ex: res.getExecutionInfos()){
_log.debug("Writing comment result errors: {}", ex.getErrors());
_log.debug("Writing comment result payload: {}", ex.getIncomingPayload());
_log.info("Writing comment result errors: {}", ex.getErrors());
_log.info("Writing comment result payload: {}", ex.getIncomingPayload());
}
_log.debug("Writing comment result executed?: {}", res.wasApplied());
_log.info("Writing comment result executed?: {}", res.wasApplied());
} catch (Exception e) {
e.printStackTrace();
@@ -2605,7 +2570,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
try {
boolean res = session.execute(writeBatch).wasApplied();
_log.debug("Comments update OK to: " + comment2Edit.getText());
_log.info("Comments update OK to: " + comment2Edit.getText());
return res;
} catch (Exception e) {
@@ -2706,7 +2671,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
return false;
}
if (isPostLiked(like.getUserid(), feedId)) {
_log.debug("User " + like.getUserid() + " already liked Feed " + feedId);
_log.info("User " + like.getUserid() + " already liked Feed " + feedId);
return true;
}
else {
@@ -3158,7 +3123,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
if(feeds.isEmpty()){
_log.debug("There are no feeds containing hashtag " + row.getString(HASHTAG) + " in vre " + vreid);
_log.info("There are no feeds containing hashtag " + row.getString(HASHTAG) + " in vre " + vreid);
continue;
}
@@ -3639,8 +3604,8 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
BatchStatement writeBatch = getBatch().addAll(boundStatements);
//boundStatements.forEach(stmt->writeBatch.add(stmt));
ResultSet res = session.execute(writeBatch);
_log.debug(res.getExecutionInfos().toString());
_log.debug(""+res.wasApplied());
_log.info(res.getExecutionInfos().toString());
_log.info(""+res.wasApplied());
/*
session.execute(createNewaAttachEntry(session).bind(
UUID.fromString(toSave.getId()),
@@ -3952,7 +3917,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
return false;
}
_log.debug("CommentsNo update OK to: " + newCount);
_log.info("CommentsNo update OK to: " + newCount);
return true;
}
@@ -3981,7 +3946,7 @@ public final class DBCassandraAstyanaxImpl implements DatabookStore {
return false;
}
_log.debug("LikesNo update OK to: " + newCount);
_log.info("LikesNo update OK to: " + newCount);
return true;
}
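Note: most write paths in this file follow the same pattern: prepare and bind statements, collect them, add them to a batch via getBatch().addAll(boundStatements), execute, and check wasApplied(). A minimal sketch of that pattern against the 4.x driver API (BatchWriteSketch and write are illustrative names, not part of this repository); wasApplied() is only meaningful for conditional (lightweight-transaction) statements and is always true otherwise.

    import com.datastax.oss.driver.api.core.CqlSession;
    import com.datastax.oss.driver.api.core.cql.BatchStatement;
    import com.datastax.oss.driver.api.core.cql.BoundStatement;
    import com.datastax.oss.driver.api.core.cql.DefaultBatchType;

    public class BatchWriteSketch {
        // Runs the bound statements as one logged batch and reports whether
        // Cassandra applied it.
        static boolean write(CqlSession session, Iterable<BoundStatement> statements) {
            BatchStatement batch = BatchStatement.newInstance(DefaultBatchType.LOGGED).addAll(statements);
            return session.execute(batch).wasApplied();
        }
    }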

View File

@@ -49,7 +49,7 @@ public class RunningCluster implements Serializable {
/**
* other constants
*/
private final static String RUNTIME_RESOURCE_NAME = "SocialDB";
private final static String RUNTIME_RESOURCE_NAME = "SocialPortalDataStore";
private final static String PLATFORM_NAME = "Cassandra";
private static final String DEFAULT_CONFIGURATION = "/org/gcube/portal/databook/server/resources/databook.properties";
@@ -72,7 +72,7 @@ public class RunningCluster implements Serializable {
* @param infrastructureName could be null
* @return an instance of the RunningCluster
*/
public static synchronized RunningCluster getInstance(String infrastructureName){
public static synchronized RunningCluster getInstance(String infrastructureName) {
if (singleton == null) {
singleton = new RunningCluster(infrastructureName);
}
@@ -81,10 +81,9 @@
/**
* private constructor
*/
private RunningCluster(String infrastructureName){
private RunningCluster(String infrastructureName) {
//Query the IS (for the future)
try{
List<ServiceEndpoint> resources = getConfigurationFromIS(infrastructureName);
/*List<ServiceEndpoint> resources = getConfigurationFromIS(infrastructureName);
if (resources.size() > 1) {
_log.error("Too many Runtime Resource having name " + RUNTIME_RESOURCE_NAME +" in this scope ");
throw new TooManyRunningClustersException("There exist more than 1 Runtime Resource in this scope having name "
@@ -98,17 +97,17 @@
for (ServiceEndpoint res : resources) {
AccessPoint found = res.profile().accessPoints().iterator().next();
host = found.address();
datacenterName = found.description();
clusterName = found.description();
keyspaceName = found.name();
}
}
} catch (Exception e) {
e.printStackTrace();
}
}*/
/*host = "10.1.28.55:9042, 10.1.30.142:9042, 10.1.28.100:9042";
host = "10.1.28.55:9042, 10.1.30.142:9042, 10.1.28.100:9042";
datacenterName = "1";
keyspaceName = "dev_mig_consistent";*/
keyspaceName = "dev_mig_consistent";
}
/**
@@ -116,7 +115,7 @@
* @return the
* @throws Exception
*/
private List<ServiceEndpoint> getConfigurationFromIS(String infrastructureName) {
private List<ServiceEndpoint> getConfigurationFromIS(String infrastructureName) throws Exception {
_log.debug("getConfigurationFromIS infrastructureName="+infrastructureName );
String scope = "/";
if(infrastructureName != null && !infrastructureName.isEmpty())
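Note: one side of this hunk comments out the ServiceEndpoint lookup and relies on a hardcoded "host:port, host:port" string plus a fixed datacenter and keyspace. For reference, a hedged sketch of turning such a string into the InetSocketAddress contact points the 4.x session builder accepts (ContactPointsSketch and parse are illustrative names; how RunningCluster actually parses the hosts value is not shown in this diff):

    import java.net.InetSocketAddress;
    import java.util.ArrayList;
    import java.util.Collection;

    public class ContactPointsSketch {
        // Splits e.g. "10.1.28.55:9042, 10.1.30.142:9042" into contact points.
        static Collection<InetSocketAddress> parse(String hostList) {
            Collection<InetSocketAddress> points = new ArrayList<>();
            for (String entry : hostList.split(",")) {
                String[] hostAndPort = entry.trim().split(":");
                points.add(new InetSocketAddress(hostAndPort[0], Integer.parseInt(hostAndPort[1])));
            }
            return points;
        }
    }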

View File

@@ -20,7 +20,6 @@ public class Tester {
public Tester() {
store = new DBCassandraAstyanaxImpl("gcube"); // "gcube" is the infrastructure name; this public constructor never drops and recreates the keyspace
}
public static void main(String[] args) throws ColumnNameNotFoundException, PrivacyLevelTypeNotFoundException, FeedIDNotFoundException, FeedTypeNotFoundException {