
Need to stick with guava:11.0.2, as it is the version used by the Hadoop components (the Oozie client, for sure). The latest version (28.2-jre) breaks the Oozie workflow submission.

Claudio Atzori 2020-03-19 13:58:58 +01:00
parent 1850a02ae4
commit a0ab15a64c
2 changed files with 2 additions and 2 deletions


@@ -74,6 +74,6 @@ public class SparkCreateConnectedComponent {
     }
 
     public static long getHashcode(final String id) {
-        return Hashing.murmur3_128().hashUnencodedChars(id).asLong();
+        return Hashing.murmur3_128().hashString(id).asLong();
     }
 }
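For reference, a minimal sketch of the resulting helper under Guava 11.0.2 (class name and sample id below are illustrative, not taken from the repository). In Guava 11.0.2, hashString(CharSequence) hashes the string's UTF-16 code units, which is what hashUnencodedChars(CharSequence) does in Guava 15+; since the older release has no hashUnencodedChars, the call has to be switched when pinning the version back, while the produced hash stays the same.

// Sketch assuming Guava 11.0.2 on the classpath; class name and sample id are illustrative.
import com.google.common.hash.Hashing;

public class GetHashcodeExample {

    // Hashes the id over its UTF-16 code units with murmur3_128,
    // equivalent to hashUnencodedChars(id) in Guava 15 and later.
    public static long getHashcode(final String id) {
        return Hashing.murmur3_128().hashString(id).asLong();
    }

    public static void main(final String[] args) {
        System.out.println(getHashcode("some-record-id"));
    }
}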


@@ -503,7 +503,7 @@
     <dhp.spark.version>2.4.0.cloudera2</dhp.spark.version>
     <dhp.jackson.version>2.9.6</dhp.jackson.version>
     <dhp.commons.lang.version>3.5</dhp.commons.lang.version>
-    <dhp.guava.version>28.2-jre</dhp.guava.version>
+    <dhp.guava.version>11.0.2</dhp.guava.version>
     <scala.version>2.11.12</scala.version>
     <junit.version>4.12</junit.version>
     <mongodb.driver.version>3.4.2</mongodb.driver.version>
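The property above only has an effect where it is referenced. The <dependencyManagement> section is not part of this diff, so the snippet below is an assumed, typical layout showing how dhp.guava.version would feed the managed guava dependency in the parent pom:

<!-- assumed layout (not shown in the diff): the property pins the managed guava version -->
<dependencyManagement>
    <dependencies>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>${dhp.guava.version}</version>
        </dependency>
    </dependencies>
</dependencyManagement>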