forked from D-Net/dnet-hadoop
[Measures] addressed comments in the PR
This commit is contained in:
parent c304657d91
commit b61efd613b
@@ -392,7 +392,7 @@ public class OafMapperUtils {
 		return null;
 	}
 
-	public static KeyValue newKeyValueInstance (String key, String value, DataInfo dataInfo){
+	public static KeyValue newKeyValueInstance(String key, String value, DataInfo dataInfo) {
 		KeyValue kv = new KeyValue();
 		kv.setDataInfo(dataInfo);
 		kv.setKey(key);
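The hunk above cuts off right after kv.setKey(key). As context only, here is a minimal sketch of how the OafMapperUtils helper presumably finishes, assuming it simply sets the value and returns the instance; the last two statements are not part of the excerpt:

	// Hedged sketch: everything after kv.setKey(key) is assumed, since the
	// diff excerpt ends there. KeyValue and DataInfo are the eu.dnetlib.dhp.schema.oaf types.
	public static KeyValue newKeyValueInstance(String key, String value, DataInfo dataInfo) {
		KeyValue kv = new KeyValue();
		kv.setDataInfo(dataInfo);
		kv.setKey(key);
		kv.setValue(value); // assumed continuation
		return kv;          // assumed continuation
	}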
@@ -22,7 +22,6 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
@@ -31,7 +30,6 @@ import eu.dnetlib.dhp.schema.oaf.Measure;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
 
 /**
  * created the Atomic Action for each tipe of results
  */
@@ -65,7 +63,7 @@ public class SparkAtomicActionUsageJob implements Serializable {
 		SparkConf conf = new SparkConf();
 		conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));
 
-		final String dbname = parser.get("statsdb");
+		final String dbname = parser.get("usagestatsdb");
 
 		final String workingPath = parser.get("workingPath");
 
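For context, the renamed parameter names the Hive database the job reads from. Below is an illustrative sketch of how the SparkConf from this hunk is typically turned into a Hive-enabled session and queried; the session wiring and the "usage_stats" table name are assumptions for illustration, not code from this commit:

	import org.apache.spark.SparkConf;
	import org.apache.spark.sql.Dataset;
	import org.apache.spark.sql.Row;
	import org.apache.spark.sql.SparkSession;

	// Illustrative wiring only; the job's actual session helper may differ.
	SparkConf conf = new SparkConf();
	conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));

	SparkSession spark = SparkSession
		.builder()
		.appName("SparkAtomicActionUsageJob")
		.config(conf)
		.enableHiveSupport()
		.getOrCreate();

	final String dbname = parser.get("usagestatsdb");
	// "usage_stats" is a placeholder table name, not taken from the commit.
	Dataset<Row> usageDb = spark.sql("SELECT * FROM " + dbname + ".usage_stats");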
@@ -130,8 +128,9 @@ public class SparkAtomicActionUsageJob implements Serializable {
 		return Arrays
 			.asList(
-				OafMapperUtils.newMeasureInstance("downloads", String.valueOf(downloads), UPDATE_KEY_USAGE_COUNTS, dataInfo),
-				OafMapperUtils.newMeasureInstance("views", String.valueOf(views), UPDATE_KEY_USAGE_COUNTS, dataInfo));
+				OafMapperUtils
+					.newMeasureInstance("downloads", String.valueOf(downloads), UPDATE_KEY_USAGE_COUNTS, dataInfo),
+				OafMapperUtils.newMeasureInstance("views", String.valueOf(views), UPDATE_KEY_USAGE_COUNTS, dataInfo));
 
 	}
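The reformatted calls above go through the OafMapperUtils.newMeasureInstance helper. A hedged sketch of what that helper plausibly builds, assuming Measure exposes an id plus a list of KeyValue units; the real implementation in OafMapperUtils may differ:

	// Hedged sketch, not the actual OafMapperUtils code: a Measure identified by
	// the metric name ("downloads" or "views") whose unit carries the count as a
	// KeyValue built with the helper shown in the first hunk.
	public static Measure newMeasureInstance(String id, String value, String key, DataInfo dataInfo) {
		Measure m = new Measure();
		m.setId(id);
		m.setUnit(Arrays.asList(newKeyValueInstance(key, value, dataInfo)));
		return m;
	}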
@@ -19,7 +19,7 @@
 	},
 	{
 		"paramName": "sdb",
-		"paramLongName": "statsdb",
+		"paramLongName": "usagestatsdb",
 		"paramDescription": "the name of the db to be used",
 		"paramRequired": true
 	},
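Note that the renamed "paramLongName" has to stay aligned with the parser.get("usagestatsdb") call in the job above and with the --usagestatsdb argument passed by the workflow below; these renames travel together.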
@@ -5,8 +5,8 @@
 		<description>the path where to store the actionset</description>
 	</property>
 	<property>
-		<name>statsdb</name>
-		<description>the path where to store the actionset</description>
+		<name>usagestatsdb</name>
+		<description>the name of the db to be used</description>
 	</property>
 	<property>
 		<name>sparkDriverMemory</name>
@@ -88,7 +88,7 @@
 			</spark-opts>
 			<arg>--hive_metastore_uris</arg><arg>${hiveMetastoreUris}</arg>
 			<arg>--outputPath</arg><arg>${outputPath}</arg>
-			<arg>--statsdb</arg><arg>${statsdb}</arg>
+			<arg>--usagestatsdb</arg><arg>${usagestatsdb}</arg>
 			<arg>--workingPath</arg><arg>${workingDir}/usageDb</arg>
 		</spark>
 		<ok to="End"/>